From 08418ea77f04b375e0fd68b407465f1224c0dcc9 Mon Sep 17 00:00:00 2001 From: Dragos Musetescu Date: Thu, 2 Apr 2026 15:47:13 +0000 Subject: [PATCH 1/2] fix(pi): normalize skill names and sync parity --- src/commands/plugin-path.ts | 16 +- src/converters/claude-to-pi.ts | 266 +- src/parsers/claude-home.ts | 3 + src/sync/commands.ts | 126 +- src/sync/json-config.ts | 65 +- src/sync/pi-artifact-status.ts | 17 + src/sync/pi-skills.ts | 129 + src/sync/pi.ts | 829 +++- src/targets/pi.ts | 476 ++- src/templates/pi/compat-extension.ts | 1166 +++++- src/types/claude.ts | 3 + src/types/js-yaml.d.ts | 4 + src/types/pi.ts | 1 + src/utils/files.ts | 549 ++- src/utils/pi-layout.ts | 104 + src/utils/pi-managed.ts | 970 +++++ src/utils/pi-policy.ts | 17 + src/utils/pi-skills.ts | 1557 +++++++ src/utils/pi-trust-contract.ts | 33 + tests/claude-home.test.ts | 21 + tests/files.test.ts | 111 + tests/path-sanitization.test.ts | 7 +- tests/pi-converter.test.ts | 221 +- tests/pi-writer.test.ts | 2104 +++++++++- tests/plugin-path.test.ts | 66 + tests/sync-pi.test.ts | 5703 +++++++++++++++++++++++++- 26 files changed, 14246 insertions(+), 318 deletions(-) create mode 100644 src/sync/pi-artifact-status.ts create mode 100644 src/sync/pi-skills.ts create mode 100644 src/types/js-yaml.d.ts create mode 100644 src/utils/pi-layout.ts create mode 100644 src/utils/pi-managed.ts create mode 100644 src/utils/pi-policy.ts create mode 100644 src/utils/pi-skills.ts create mode 100644 src/utils/pi-trust-contract.ts create mode 100644 tests/files.test.ts diff --git a/src/commands/plugin-path.ts b/src/commands/plugin-path.ts index d1d0b73cf..b39b94523 100644 --- a/src/commands/plugin-path.ts +++ b/src/commands/plugin-path.ts @@ -2,6 +2,7 @@ import { defineCommand } from "citty" import { promises as fs } from "fs" import os from "os" import path from "path" +import { assertNoSymlinkAncestors, assertNoSymlinkTarget, assertPathWithinRoot, assertSafePathComponent, ensureManagedDir } from "../utils/files" 
export default defineCommand({ meta: { @@ -21,7 +22,7 @@ export default defineCommand({ }, }, async run({ args }) { - const pluginName = String(args.plugin) + const pluginName = assertSafePathComponent(String(args.plugin), "plugin name") const branch = String(args.branch) // Reversible encoding: / -> ~ (safe because ~ is illegal in git branch names per @@ -32,10 +33,13 @@ export default defineCommand({ .replace(/[^a-zA-Z0-9._~-]/g, (ch) => `%${ch.charCodeAt(0).toString(16).padStart(2, "0")}`) const dirName = `${pluginName}-${sanitized}` const cacheRoot = path.join(os.homedir(), ".cache", "compound-engineering", "branches") - await fs.mkdir(cacheRoot, { recursive: true }) + await ensureManagedDir(cacheRoot) const targetDir = path.join(cacheRoot, dirName) const source = resolveGitHubSource() + await assertNoSymlinkAncestors(targetDir) + await assertNoSymlinkTarget(targetDir) + if (await dirExists(targetDir)) { console.error(`Updating existing checkout at ${targetDir}`) await fetchAndCheckout(targetDir, branch) @@ -45,6 +49,9 @@ export default defineCommand({ } const pluginPath = path.join(targetDir, "plugins", pluginName) + assertPathWithinRoot(pluginPath, path.join(targetDir, "plugins"), "plugin path") + await assertNoSymlinkAncestors(pluginPath) + await assertNoSymlinkTarget(pluginPath) if (!(await dirExists(pluginPath))) { throw new Error(`Plugin directory not found: ${pluginPath}`) } @@ -57,7 +64,10 @@ export default defineCommand({ async function dirExists(p: string): Promise { try { - const stat = await fs.stat(p) + const stat = await fs.lstat(p) + if (stat.isSymbolicLink()) { + throw new Error(`Refusing to use symlinked cache checkout ${p}`) + } return stat.isDirectory() } catch { return false diff --git a/src/converters/claude-to-pi.ts b/src/converters/claude-to-pi.ts index 92259906a..c5fbefca9 100644 --- a/src/converters/claude-to-pi.ts +++ b/src/converters/claude-to-pi.ts @@ -1,4 +1,7 @@ +import path from "path" import { formatFrontmatter } from 
"../utils/frontmatter" +import { appendCompatibilityNoteIfNeeded, normalizePiSkillName, transformPiBodyContent, uniquePiSkillName, type PiNameMaps } from "../utils/pi-skills" +import { isSafePiManagedName } from "../utils/pi-managed" import type { ClaudeAgent, ClaudeCommand, ClaudeMcpServer, ClaudePlugin } from "../types/claude" import type { PiBundle, @@ -9,22 +12,83 @@ import type { import type { ClaudeToOpenCodeOptions } from "./claude-to-opencode" import { PI_COMPAT_EXTENSION_SOURCE } from "../templates/pi/compat-extension" -export type ClaudeToPiOptions = ClaudeToOpenCodeOptions +export type ClaudeToPiOptions = ClaudeToOpenCodeOptions & { + extraNameMaps?: PiNameMaps + preserveUnknownQualifiedRefs?: boolean + rejectUnknownQualifiedTaskRefs?: boolean + rejectUnresolvedFirstPartyQualifiedRefs?: boolean +} const PI_DESCRIPTION_MAX_LENGTH = 1024 export function convertClaudeToPi( plugin: ClaudePlugin, - _options: ClaudeToPiOptions, + options: ClaudeToPiOptions, ): PiBundle { const promptNames = new Set() - const usedSkillNames = new Set(plugin.skills.map((skill) => normalizeName(skill.name))) + const usedSkillNames = new Set() + + const sortedSkills = [...plugin.skills].sort((a, b) => a.name < b.name ? -1 : a.name > b.name ? 1 : 0) + const sortedAgents = [...plugin.agents].sort((a, b) => a.name < b.name ? -1 : a.name > b.name ? 
1 : 0) + + assertNoConfiguredSharedTargetConflicts(options.extraNameMaps?.skills, options.extraNameMaps?.agents) + reserveConfiguredPiTargetNames(sortedSkills.map((skill) => skill.name), options.extraNameMaps?.skills, usedSkillNames) + reserveConfiguredPiTargetNames(sortedAgents.map((agent) => agent.name), options.extraNameMaps?.agents, usedSkillNames) + reserveConfiguredPiTargetNames( + [...plugin.commands].filter((command) => !command.disableModelInvocation).map((command) => command.name), + options.extraNameMaps?.prompts, + promptNames, + ) + + const skillDirs = sortedSkills.map((skill) => ({ + name: resolvePiTargetName(skill.name, options.extraNameMaps?.skills, usedSkillNames), + sourceDir: skill.sourceDir, + sourceName: skill.name, + })) + + const agentNames = sortedAgents.map((agent) => + resolvePiTargetName(agent.name, options.extraNameMaps?.agents, usedSkillNames), + ) + + const agentMap: Record = {} + + const skillMap: Record = {} + sortedSkills.forEach((skill, i) => { + const emitted = skillDirs[i].name + skillMap[skill.name] = emitted + addQualifiedAlias(skillMap, plugin.manifest.name, skill.name, emitted) + }) - const prompts = plugin.commands + const convertibleCommands = [...plugin.commands] .filter((command) => !command.disableModelInvocation) - .map((command) => convertPrompt(command, promptNames)) + .sort((a, b) => a.name < b.name ? -1 : a.name > b.name ? 
1 : 0) + const promptTargetNames = convertibleCommands.map((command) => + resolvePiTargetName(command.name, options.extraNameMaps?.prompts, promptNames), + ) + + const promptMap: Record = {} + convertibleCommands.forEach((command, i) => { + const emitted = promptTargetNames[i] + promptMap[command.name] = emitted + addQualifiedAlias(promptMap, plugin.manifest.name, command.name, emitted) + }) + + sortedAgents.forEach((agent, i) => { + const emitted = agentNames[i] + agentMap[agent.name] = emitted + addQualifiedAlias(agentMap, plugin.manifest.name, agent.name, emitted) + const qualifiedAgentAlias = buildQualifiedAgentAlias(plugin.root, plugin.manifest.name, agent) + if (qualifiedAgentAlias) { + agentMap[qualifiedAgentAlias] = emitted + } + }) + + const nameMaps: PiNameMaps = { agents: agentMap, skills: skillMap, prompts: promptMap } + const transformMaps = mergeNameMaps(nameMaps, options.extraNameMaps) + + const prompts = convertibleCommands.map((command, i) => convertPrompt(command, promptTargetNames[i], transformMaps, options)) - const generatedSkills = plugin.agents.map((agent) => convertAgent(agent, usedSkillNames)) + const generatedSkills = sortedAgents.map((agent, i) => convertAgent(agent, agentNames[i], transformMaps, options)) const extensions = [ { @@ -34,35 +98,59 @@ export function convertClaudeToPi( ] return { + pluginName: plugin.manifest.name, prompts, - skillDirs: plugin.skills.map((skill) => ({ - name: skill.name, - sourceDir: skill.sourceDir, - })), + skillDirs, generatedSkills, extensions, mcporterConfig: plugin.mcpServers ? 
convertMcpToMcporter(plugin.mcpServers) : undefined, + nameMaps, } } -function convertPrompt(command: ClaudeCommand, usedNames: Set) { - const name = uniqueName(normalizeName(command.name), usedNames) +function addQualifiedAlias(map: Record, pluginName: string | undefined, sourceName: string, emitted: string) { + if (!pluginName || !sourceName) return + map[`${pluginName}:${sourceName}`] = emitted +} + +function buildQualifiedAgentAlias(root: string, pluginName: string | undefined, agent: ClaudeAgent): string | undefined { + if (!pluginName) return undefined + + const agentsRoot = path.join(root, "agents") + const relative = path.relative(agentsRoot, agent.sourcePath) + if (!relative || relative.startsWith("..") || path.isAbsolute(relative)) { + return undefined + } + + const withoutExt = relative.replace(/\.md$/i, "") + const segments = withoutExt.split(path.sep).filter(Boolean) + if (segments.length <= 1) { + return `${pluginName}:${agent.name}` + } + + return [pluginName, ...segments.slice(0, -1), agent.name].join(":") +} + +function convertPrompt(command: ClaudeCommand, name: string, nameMaps: PiNameMaps, options: ClaudeToPiOptions) { const frontmatter: Record = { description: command.description, "argument-hint": command.argumentHint, } - let body = transformContentForPi(command.body) - body = appendCompatibilityNoteIfNeeded(body) + const body = appendCompatibilityNoteIfNeeded(transformPiBodyContent(command.body, nameMaps, { + preserveUnknownQualifiedRefs: options.preserveUnknownQualifiedRefs, + rejectUnknownQualifiedTaskRefs: options.rejectUnknownQualifiedTaskRefs, + rejectUnresolvedFirstPartyQualifiedRefs: options.rejectUnresolvedFirstPartyQualifiedRefs, + })) return { name, content: formatFrontmatter(frontmatter, body.trim()), + sourceName: command.name, } } -function convertAgent(agent: ClaudeAgent, usedNames: Set): PiGeneratedSkill { - const name = uniqueName(normalizeName(agent.name), usedNames) +function convertAgent(agent: ClaudeAgent, name: string, 
nameMaps: PiNameMaps, options: ClaudeToPiOptions): PiGeneratedSkill { const description = sanitizeDescription( agent.description ?? `Converted from Claude agent ${agent.name}`, ) @@ -77,77 +165,24 @@ function convertAgent(agent: ClaudeAgent, usedNames: Set): PiGeneratedSk sections.push(`## Capabilities\n${agent.capabilities.map((capability) => `- ${capability}`).join("\n")}`) } - const body = [ + const body = transformPiBodyContent([ ...sections, agent.body.trim().length > 0 ? agent.body.trim() : `Instructions converted from the ${agent.name} agent.`, - ].join("\n\n") + ].join("\n\n"), nameMaps, { + preserveUnknownQualifiedRefs: options.preserveUnknownQualifiedRefs, + rejectUnknownQualifiedTaskRefs: options.rejectUnknownQualifiedTaskRefs, + rejectUnresolvedFirstPartyQualifiedRefs: options.rejectUnresolvedFirstPartyQualifiedRefs, + }) return { name, content: formatFrontmatter(frontmatter, body), + sourceName: agent.name, } } -export function transformContentForPi(body: string): string { - let result = body - - // Task repo-research-analyst(feature_description) or Task compound-engineering:research:repo-research-analyst(args) - // -> Run subagent with agent="repo-research-analyst" and task="feature_description" - const taskPattern = /^(\s*-?\s*)Task\s+([a-z][a-z0-9:-]*)\(([^)]*)\)/gm - result = result.replace(taskPattern, (_match, prefix: string, agentName: string, args: string) => { - const finalSegment = agentName.includes(":") ? agentName.split(":").pop()! : agentName - const skillName = normalizeName(finalSegment) - const trimmedArgs = args.trim().replace(/\s+/g, " ") - return trimmedArgs - ? 
`${prefix}Run subagent with agent=\"${skillName}\" and task=\"${trimmedArgs}\".` - : `${prefix}Run subagent with agent=\"${skillName}\".` - }) - - // Claude-specific tool references - result = result.replace(/\bAskUserQuestion\b/g, "ask_user_question") - result = result.replace(/\bTodoWrite\b/g, "file-based todos (todos/ + /skill:todo-create)") - result = result.replace(/\bTodoRead\b/g, "file-based todos (todos/ + /skill:todo-create)") - - // /command-name or /workflows:command-name -> /workflows-command-name - const slashCommandPattern = /(? { - if (commandName.includes("/")) return match - if (["dev", "tmp", "etc", "usr", "var", "bin", "home"].includes(commandName)) { - return match - } - - if (commandName.startsWith("skill:")) { - const skillName = commandName.slice("skill:".length) - return `/skill:${normalizeName(skillName)}` - } - - const withoutPrefix = commandName.startsWith("prompts:") - ? commandName.slice("prompts:".length) - : commandName - - return `/${normalizeName(withoutPrefix)}` - }) - - return result -} - -function appendCompatibilityNoteIfNeeded(body: string): string { - if (!/\bmcp\b/i.test(body)) return body - - const note = [ - "", - "## Pi + MCPorter note", - "For MCP access in Pi, use MCPorter via the generated tools:", - "- `mcporter_list` to inspect available MCP tools", - "- `mcporter_call` to invoke a tool", - "", - ].join("\n") - - return body + note -} - function convertMcpToMcporter(servers: Record): PiMcporterConfig { const mcpServers: Record = {} @@ -173,19 +208,6 @@ function convertMcpToMcporter(servers: Record): PiMcpor return { mcpServers } } -function normalizeName(value: string): string { - const trimmed = value.trim() - if (!trimmed) return "item" - const normalized = trimmed - .toLowerCase() - .replace(/[\\/]+/g, "-") - .replace(/[:\s]+/g, "-") - .replace(/[^a-z0-9_-]+/g, "-") - .replace(/-+/g, "-") - .replace(/^-+|-+$/g, "") - return normalized || "item" -} - function sanitizeDescription(value: string, maxLength = 
PI_DESCRIPTION_MAX_LENGTH): string { const normalized = value.replace(/\s+/g, " ").trim() if (normalized.length <= maxLength) return normalized @@ -193,16 +215,62 @@ function sanitizeDescription(value: string, maxLength = PI_DESCRIPTION_MAX_LENGT return normalized.slice(0, Math.max(0, maxLength - ellipsis.length)).trimEnd() + ellipsis } -function uniqueName(base: string, used: Set): string { - if (!used.has(base)) { - used.add(base) - return base +function mergeNameMaps(primary: PiNameMaps, secondary?: PiNameMaps): PiNameMaps { + return { + agents: { ...(secondary?.agents ?? {}), ...(primary.agents ?? {}) }, + skills: { ...(secondary?.skills ?? {}), ...(primary.skills ?? {}) }, + prompts: { ...(secondary?.prompts ?? {}), ...(primary.prompts ?? {}) }, + } +} + +function resolvePiTargetName(sourceName: string, configuredMap: Record | undefined, usedNames: Set): string { + const configured = configuredMap?.[sourceName] + if (configured && isSafePiManagedName(configured)) { + usedNames.add(configured) + return configured } - let index = 2 - while (used.has(`${base}-${index}`)) { - index += 1 + + return uniquePiSkillName(normalizePiSkillName(sourceName), usedNames) +} + +function reserveConfiguredPiTargetNames( + sourceNames: string[], + configuredMap: Record | undefined, + usedNames: Set, +) { + const reservedBySource = new Map() + + for (const sourceName of sourceNames) { + const configured = configuredMap?.[sourceName] + if (!configured || !isSafePiManagedName(configured)) continue + + const existingSource = reservedBySource.get(configured) + if (existingSource && existingSource !== sourceName) { + throw new Error(`Configured Pi target name collision for ${sourceName}: ${configured}`) + } + + reservedBySource.set(configured, sourceName) + usedNames.add(configured) + } +} + +function assertNoConfiguredSharedTargetConflicts( + skillMap: Record | undefined, + agentMap: Record | undefined, +) { + const reserved = new Map() + + for (const [sourceName, configured] of 
Object.entries(skillMap ?? {})) { + if (!isSafePiManagedName(configured)) continue + reserved.set(configured, sourceName) + } + + for (const [sourceName, configured] of Object.entries(agentMap ?? {})) { + if (!isSafePiManagedName(configured)) continue + const existing = reserved.get(configured) + if (existing && existing !== sourceName) { + throw new Error(`Configured Pi target name collision for ${sourceName}: ${configured}`) + } + reserved.set(configured, sourceName) } - const name = `${base}-${index}` - used.add(name) - return name } diff --git a/src/parsers/claude-home.ts b/src/parsers/claude-home.ts index 3433a1513..6fd5600ba 100644 --- a/src/parsers/claude-home.ts +++ b/src/parsers/claude-home.ts @@ -53,6 +53,9 @@ async function loadPersonalSkills(skillsDir: string): Promise { description: data.description as string | undefined, argumentHint: data["argument-hint"] as string | undefined, disableModelInvocation: data["disable-model-invocation"] === true ? true : undefined, + entryDir: entryPath, + trustedRoot: skillsDir, + trustedBoundary: sourceDir, sourceDir, skillPath, }) diff --git a/src/sync/commands.ts b/src/sync/commands.ts index 8fcfba169..e90042c93 100644 --- a/src/sync/commands.ts +++ b/src/sync/commands.ts @@ -1,7 +1,8 @@ import path from "path" import type { ClaudeHomeConfig } from "../parsers/claude-home" import type { ClaudePlugin } from "../types/claude" -import { backupFile, resolveCommandPath, sanitizePathName, writeText } from "../utils/files" +import { backupFile, pathExists, readJson, readText, removeFileIfExists, resolveCommandPath, sanitizePathName, writeJson, writeText, writeTextIfChanged } from "../utils/files" +import { collectPiSameRunDependencies } from "../utils/pi-skills" import { convertClaudeToCodex } from "../converters/claude-to-codex" import { convertClaudeToCopilot } from "../converters/claude-to-copilot" import { convertClaudeToDroid } from "../converters/claude-to-droid" @@ -12,11 +13,34 @@ import { convertClaudeToPi } from 
"../converters/claude-to-pi" import { convertClaudeToQwen, type ClaudeToQwenOptions } from "../converters/claude-to-qwen" import { convertClaudeToWindsurf } from "../converters/claude-to-windsurf" import { writeWindsurfBundle } from "../targets/windsurf" +import type { PiBundle, PiManagedArtifact, PiManagedManifest } from "../types/pi" +import { resolvePiLayout } from "../utils/pi-layout" +import { createManagedArtifact } from "../utils/pi-managed" +import { classifyUnsupportedPiSyncStatus, isUnsupportedPiSyncArtifactError } from "./pi-artifact-status" type WindsurfSyncScope = "global" | "workspace" -const HOME_SYNC_PLUGIN_ROOT = path.join(process.cwd(), ".compound-sync-home") +export type PiSyncArtifactStatus = "published" | "retryable" | "blocked-by-policy" | "unsupported-final" + +export type SyncPiCommandResult = { + sourceName: string + emittedName: string + status: PiSyncArtifactStatus + artifact?: PiManagedArtifact + warning?: string + sameRunDependencies?: { + skills: string[] + prompts: string[] + } +} + +let piSyncCommandConversionHookForTests: (() => void | Promise) | null = null + +export function setPiSyncCommandConversionHookForTests(hook: (() => void | Promise) | null): void { + piSyncCommandConversionHookForTests = hook +} +const HOME_SYNC_PLUGIN_ROOT = path.join(process.cwd(), ".compound-sync-home") const DEFAULT_SYNC_OPTIONS: ClaudeToOpenCodeOptions = { agentMode: "subagent", inferTemperature: false, @@ -85,17 +109,97 @@ export async function syncCodexCommands( export async function syncPiCommands( config: ClaudeHomeConfig, outputRoot: string, -): Promise { - if (!hasCommands(config)) return + extraNameMaps?: PiManagedManifest["nameMaps"], + hooks?: { + onBeforeMutate?: (targetPath: string) => void | Promise + }, +): Promise { + const layout = resolvePiLayout(outputRoot, "sync") + let syncPrompts: SyncPiCommandResult[] = [] + const commands = [...(config.commands ?? 
[])].filter((entry) => !entry.disableModelInvocation).sort((a, b) => a.name.localeCompare(b.name)) - const plugin = buildClaudeHomePlugin(config) - const bundle = convertClaudeToPi(plugin, DEFAULT_SYNC_OPTIONS) - for (const prompt of bundle.prompts) { - await writeText(path.join(outputRoot, "prompts", `${prompt.name}.md`), prompt.content + "\n") - } - for (const extension of bundle.extensions) { - await writeText(path.join(outputRoot, "extensions", extension.name), extension.content + "\n") + if (commands.length > 0) { + try { + const bundle = await convertPiSyncCommandBundle({ ...config, commands }, extraNameMaps) + const promptsBySourceName = new Map(bundle.prompts.map((prompt) => [prompt.sourceName ?? prompt.name, prompt])) + + for (const command of commands) { + const prompt = promptsBySourceName.get(command.name) + if (!prompt) continue + const targetPath = path.join(layout.promptsDir, `${prompt.name}.md`) + const nextContent = prompt.content + "\n" + const existing = await readText(targetPath).catch(() => null) + if (existing !== nextContent) { + await hooks?.onBeforeMutate?.(targetPath) + } + await writeTextIfChanged(targetPath, nextContent, { existingContent: existing }) + syncPrompts.push({ + sourceName: prompt.sourceName ?? prompt.name, + emittedName: prompt.name, + status: "published", + artifact: createManagedArtifact(layout, "prompt", prompt.sourceName ?? 
prompt.name, prompt.name), + sameRunDependencies: collectPiSameRunDependencies(command.body), + }) + } + + return syncPrompts + } catch (error) { + if (!isUnsupportedPiSyncArtifactError(error)) { + throw error + } + } + + for (const command of commands) { + let bundle: PiBundle + try { + bundle = await convertPiSyncCommandBundle({ ...config, commands: [command] }, extraNameMaps) + } catch (error) { + if (!isUnsupportedPiSyncArtifactError(error)) { + throw error + } + syncPrompts.push({ + sourceName: command.name, + emittedName: sanitizePathName(command.name), + status: classifyUnsupportedPiSyncStatus(error.message), + warning: `Skipping unsupported Pi sync command ${command.name}: ${error.message}`, + }) + continue + } + + for (const prompt of bundle.prompts) { + const targetPath = path.join(layout.promptsDir, `${prompt.name}.md`) + const nextContent = prompt.content + "\n" + const existing = await readText(targetPath).catch(() => null) + if (existing !== nextContent) { + await hooks?.onBeforeMutate?.(targetPath) + } + await writeTextIfChanged(targetPath, nextContent, { existingContent: existing }) + syncPrompts.push({ + sourceName: prompt.sourceName ?? prompt.name, + emittedName: prompt.name, + status: "published", + artifact: createManagedArtifact(layout, "prompt", prompt.sourceName ?? 
prompt.name, prompt.name), + sameRunDependencies: collectPiSameRunDependencies(command.body), + }) + } + } } + + return syncPrompts +} + +async function convertPiSyncCommandBundle( + config: ClaudeHomeConfig, + extraNameMaps?: PiManagedManifest["nameMaps"], +): Promise { + await piSyncCommandConversionHookForTests?.() + return convertClaudeToPi(buildClaudeHomePlugin({ ...config, skills: [] }), { + ...DEFAULT_SYNC_OPTIONS, + extraNameMaps, + preserveUnknownQualifiedRefs: true, + rejectUnknownQualifiedTaskRefs: true, + rejectUnresolvedFirstPartyQualifiedRefs: true, + }) } export async function syncDroidCommands( diff --git a/src/sync/json-config.ts b/src/sync/json-config.ts index c12780fdb..4fa45733f 100644 --- a/src/sync/json-config.ts +++ b/src/sync/json-config.ts @@ -1,5 +1,5 @@ import path from "path" -import { pathExists, readJson, writeJsonSecure } from "../utils/files" +import { pathExists, writeJsonSecureIfChanged, captureTextFileSnapshot, restoreTextFileSnapshot } from "../utils/files" type JsonObject = Record @@ -11,28 +11,59 @@ export async function mergeJsonConfigAtKey(options: { configPath: string key: string incoming: Record -}): Promise { - const { configPath, key, incoming } = options - const existing = await readJsonObjectSafe(configPath) - const existingEntries = isJsonObject(existing[key]) ? existing[key] : {} + replaceKeys?: string[] + snapshotOnWrite?: boolean +}): Promise<{ didWrite: boolean; isEmpty: boolean }> { + const { configPath, key, incoming, replaceKeys = [], snapshotOnWrite = true } = options + const existingText = await pathExists(configPath) ? await readTextFileSafe(configPath) : null + const existing = readJsonObjectSafe(existingText, configPath) + const existingEntries = isJsonObject(existing[key]) ? 
{ ...existing[key] } : {} + for (const replaceKey of replaceKeys) { + delete existingEntries[replaceKey] + } + const mergedEntries = { + ...existingEntries, + ...incoming, + } const merged = { ...existing, - [key]: { - ...existingEntries, - ...incoming, // incoming plugin entries overwrite same-named servers - }, + [key]: mergedEntries, } - await writeJsonSecure(configPath, merged) + if (Object.keys(mergedEntries).length === 0) { + delete merged[key] + } + + const nextText = JSON.stringify(merged, null, 2) + "\n" + if (existingText === nextText) { + return { + didWrite: false, + isEmpty: Object.keys(merged).length === 0, + } + } + + const snapshot = snapshotOnWrite ? await captureTextFileSnapshot(configPath) : null + + try { + return { + didWrite: await writeJsonSecureIfChanged(configPath, merged), + isEmpty: Object.keys(merged).length === 0, + } + } catch (error) { + if (snapshot) { + await restoreTextFileSnapshot(snapshot) + } + throw error + } } -async function readJsonObjectSafe(configPath: string): Promise { - if (!(await pathExists(configPath))) { +function readJsonObjectSafe(existingText: string | null, configPath: string): JsonObject { + if (existingText === null) { return {} } try { - const parsed = await readJson(configPath) + const parsed = JSON.parse(existingText) as unknown if (isJsonObject(parsed)) { return parsed } @@ -45,3 +76,11 @@ async function readJsonObjectSafe(configPath: string): Promise { ) return {} } + +async function readTextFileSafe(configPath: string): Promise { + try { + return await Bun.file(configPath).text() + } catch { + return null + } +} diff --git a/src/sync/pi-artifact-status.ts b/src/sync/pi-artifact-status.ts new file mode 100644 index 000000000..d68c83f5a --- /dev/null +++ b/src/sync/pi-artifact-status.ts @@ -0,0 +1,17 @@ +import type { PiSyncArtifactStatus } from "./commands" + +export function isUnsupportedPiSyncArtifactError(error: unknown): error is Error { + return error instanceof Error && 
error.message.startsWith("Unsupported ") +} + +export function classifyUnsupportedPiSyncStatus(message: string): PiSyncArtifactStatus { + if (message.startsWith("Unsupported unresolved first-party qualified ref for Pi sync:")) { + return "retryable" + } + + if (message.startsWith("Unsupported foreign qualified Task ref for Pi sync:")) { + return "blocked-by-policy" + } + + return "unsupported-final" +} diff --git a/src/sync/pi-skills.ts b/src/sync/pi-skills.ts new file mode 100644 index 000000000..5b99063ff --- /dev/null +++ b/src/sync/pi-skills.ts @@ -0,0 +1,129 @@ +import { promises as fs } from "fs" +import path from "path" +import type { ClaudeSkill } from "../types/claude" +import { ensureDir } from "../utils/files" +import { copySkillDirForPi, collectPiSameRunDependencies, type PiNameMaps } from "../utils/pi-skills" +import { isValidSkillName } from "../utils/symlink" +import type { PiSyncArtifactStatus } from "./commands" +import { classifyUnsupportedPiSyncStatus, isUnsupportedPiSyncArtifactError } from "./pi-artifact-status" + +type SyncPiSkillHooks = { + onBeforeMutate?: (skillName: string, targetPath: string, mode: "incremental" | "replace") => void | Promise +} + +export type SyncPiSkillResult = { + sourceName: string + emittedName: string + status: PiSyncArtifactStatus + warning?: string + sameRunDependencies?: { + skills: string[] + prompts: string[] + } +} + +export async function collectSyncablePiSkills(skills: ClaudeSkill[]): Promise { + const validSkills: ClaudeSkill[] = [] + + for (const skill of [...skills].sort((a, b) => a.name < b.name ? -1 : a.name > b.name ? 1 : 0)) { + if (!isValidSkillName(skill.name)) { + console.warn(`Skipping skill with unsafe name: ${skill.name}`) + continue + } + + const trustedRoot = skill.trustedRoot ?? skill.entryDir ?? skill.sourceDir + const discoveryPath = skill.entryDir ?? skill.sourceDir + const isWithinTrustedRoot = skill.entryDir + ? 
isLexicalPathWithinRoot(discoveryPath, trustedRoot) + : await isCanonicalPathWithinRoot(discoveryPath, trustedRoot) + if (!isWithinTrustedRoot) { + console.warn(`Skipping skill outside trusted root: ${skill.name}`) + continue + } + + validSkills.push(skill) + } + + return validSkills +} + +export async function syncPiSkills( + skills: ClaudeSkill[], + skillsDir: string, + skillMap: Record, + nameMaps?: PiNameMaps, + hooks?: SyncPiSkillHooks, +): Promise { + await ensureDir(skillsDir) + + const materialized: SyncPiSkillResult[] = [] + + for (const skill of skills) { + const targetName = skillMap[skill.name] + if (!targetName) continue + const target = path.join(skillsDir, targetName) + const trustedBoundary = skill.trustedBoundary ?? skill.sourceDir + + try { + await copySkillDirForPi( + skill.sourceDir, + target, + targetName, + nameMaps, + { + trustedRoot: trustedBoundary, + }, + { + preserveUnknownQualifiedRefs: true, + rejectUnknownQualifiedTaskRefs: true, + rejectUnresolvedFirstPartyQualifiedRefs: true, + }, + { + onBeforeMutate: (mode) => hooks?.onBeforeMutate?.(skill.name, target, mode), + }, + ) + } catch (error) { + if (!isUnsupportedPiSyncArtifactError(error)) { + throw error + } + materialized.push({ + sourceName: skill.name, + emittedName: targetName, + status: classifyUnsupportedPiSyncStatus(error instanceof Error ? error.message : String(error)), + warning: `Skipping unsupported Pi sync skill ${skill.name}: ${error instanceof Error ? 
error.message : String(error)}`, + }) + continue + } + materialized.push({ + sourceName: skill.name, + emittedName: targetName, + status: "published", + sameRunDependencies: collectPiSameRunDependencies(await fs.readFile(skill.skillPath, "utf8").catch(() => "")), + }) + } + + return materialized +} + +async function isCanonicalPathWithinRoot(candidatePath: string, trustedRoot: string): Promise { + const [resolvedCandidate, resolvedRoot] = await Promise.all([ + canonicalizePath(candidatePath), + canonicalizePath(trustedRoot), + ]) + + return resolvedCandidate === resolvedRoot || resolvedCandidate.startsWith(resolvedRoot + path.sep) +} + +function isLexicalPathWithinRoot(candidatePath: string, trustedRoot: string): boolean { + const resolvedCandidate = path.resolve(candidatePath) + const resolvedRoot = path.resolve(trustedRoot) + return resolvedCandidate === resolvedRoot || resolvedCandidate.startsWith(resolvedRoot + path.sep) +} + +async function canonicalizePath(targetPath: string): Promise { + try { + return await fs.realpath(targetPath) + } catch { + return path.resolve(targetPath) + } +} diff --git a/src/sync/pi.ts b/src/sync/pi.ts index 9bd00766d..016770472 100644 --- a/src/sync/pi.ts +++ b/src/sync/pi.ts @@ -1,10 +1,37 @@ import path from "path" +import os from "os" import type { ClaudeHomeConfig } from "../parsers/claude-home" import type { ClaudeMcpServer } from "../types/claude" -import { ensureDir } from "../utils/files" -import { syncPiCommands } from "./commands" +import { promises as fs } from "fs" +import { captureManagedPathSnapshot, ensureDir, ensureManagedDir, pathExists, readJson, readText, removeFileIfExists, removeManagedPathIfExists, restoreManagedPathSnapshot, sanitizePathName, type ManagedPathSnapshot, writeTextIfChanged } from "../utils/files" +import { buildPiSameRunQualifiedNameMap, normalizePiSkillName, uniquePiSkillName, type PiNameMaps } from "../utils/pi-skills" +import { PI_COMPAT_EXTENSION_SOURCE } from 
"../templates/pi/compat-extension" +import type { PiManagedArtifact } from "../types/pi" +import { buildPiAgentsBlock, ensurePiAgentsBlock, upsertBlock } from "../targets/pi" +import { syncPiCommands, type PiSyncArtifactStatus, type SyncPiCommandResult } from "./commands" import { mergeJsonConfigAtKey } from "./json-config" -import { syncSkills } from "./skills" +import { collectSyncablePiSkills, syncPiSkills, type SyncPiSkillResult } from "./pi-skills" +import { + canUseTrustedNameMaps, + canUseVerifiedCleanup, + collectLegacyArtifactCandidates, + createManagedArtifact, + createPiManagedSection, + filterPiManagedStateForVerifiedSections, + getPiManagedTrustInfo, + getReservedPiTargetNames, + mergePiNameMaps, + removeLegacyArtifactCandidates, + removeStaleManagedArtifacts, + resolveManagedArtifactPath, + replacePiManagedSection, + writePiManagedState, +} from "../utils/pi-managed" +import { resolvePiLayout } from "../utils/pi-layout" +import { getPiPolicyFingerprint } from "../utils/pi-policy" +import { derivePiSharedResourceContract } from "../utils/pi-trust-contract" + +const PI_COMPAT_EXTENSION_NAME = "compound-engineering-compat.ts" type McporterServer = { baseUrl?: string @@ -18,24 +45,796 @@ type McporterConfig = { mcpServers: Record } +type LegacySyncBootstrap = { + preserveCompatExtension: boolean + warnings: string[] +} + +type SyncPublicationSnapshots = { + snapshotRoot: string | null + snapshots: Map +} + +type ExistingJsonState = "missing" | "valid" | "invalid" + +type GlobalFallbackPolicy = { + install: boolean + sync: boolean +} + +type PiSyncRerunMode = "narrow" | "full" + +type PiSyncPassHookPayload = { + passNumber: number + activeCommandNames: string[] + activeSkillNames: string[] +} + +let piSyncRerunModeForTests: PiSyncRerunMode = "narrow" +let piSyncPassHookForTests: ((payload: PiSyncPassHookPayload) => void | Promise) | null = null + +export function setPiSyncRerunModeForTests(mode: PiSyncRerunMode | null): void { + piSyncRerunModeForTests = 
mode ?? "narrow" +} + +export function setPiSyncPassHookForTests(hook: ((payload: PiSyncPassHookPayload) => void | Promise) | null): void { + piSyncPassHookForTests = hook +} + +function resolveUserHome(): string { + return process.env.HOME || os.homedir() +} + export async function syncToPi( config: ClaudeHomeConfig, outputRoot: string, ): Promise { - const mcporterPath = path.join(outputRoot, "compound-engineering", "mcporter.json") - - await syncSkills(config.skills, path.join(outputRoot, "skills")) - await syncPiCommands(config, outputRoot) - - if (Object.keys(config.mcpServers).length > 0) { - await ensureDir(path.dirname(mcporterPath)) - const converted = convertMcpToMcporter(config.mcpServers) - await mergeJsonConfigAtKey({ - configPath: mcporterPath, - key: "mcpServers", - incoming: converted.mcpServers, + const policyFingerprint = getPiPolicyFingerprint() + const layout = resolvePiLayout(outputRoot, "sync") + const trustInfo = await getPiManagedTrustInfo(layout) + const previousState = trustInfo.state + const syncCleanupVerified = canUseVerifiedCleanup(trustInfo, "sync") + const trustedLocalInstallNameMaps = await loadTrustedLocalInstallNameMaps(outputRoot, trustInfo) + const fallbackPolicy = await resolveGlobalFallbackPolicy(outputRoot) + const trustedGlobalInstallNameMaps = fallbackPolicy.install ? await loadTrustedGlobalInstallNameMaps(layout.root) : undefined + const trustedGlobalSyncNameMaps = fallbackPolicy.sync ? await loadTrustedGlobalSyncNameMaps(layout.root) : undefined + const reservedNames = getReservedPiTargetNames(previousState ? { + ...previousState, + install: canUseTrustedNameMaps(trustInfo, "install") ? previousState.install : createPiManagedSection(), + sync: createPiManagedSection(), + nameMaps: canUseTrustedNameMaps(trustInfo, "install") + ? previousState.install.nameMaps + : createPiManagedSection().nameMaps, + } : null) + + const commands = [...(config.commands ?? 
[])] + .filter((command) => !command.disableModelInvocation) + .sort((a, b) => a.name < b.name ? -1 : a.name > b.name ? 1 : 0) + const syncableSkills = await collectSyncablePiSkills(config.skills) + const promptNames = new Set(reservedNames.prompts) + const promptMap: Record = {} + const skillNames = new Set(reservedNames.skills) + const localSkillMap: Record = {} + reservePiManagedNames(promptNames, trustedLocalInstallNameMaps?.prompts) + reservePiManagedNames(promptNames, trustedGlobalInstallNameMaps?.prompts) + reservePiManagedNames(promptNames, trustedGlobalSyncNameMaps?.prompts) + reservePiManagedNames(skillNames, trustedLocalInstallNameMaps?.skills) + reservePiManagedNames(skillNames, trustedLocalInstallNameMaps?.agents) + reservePiManagedNames(skillNames, trustedGlobalInstallNameMaps?.skills) + reservePiManagedNames(skillNames, trustedGlobalInstallNameMaps?.agents) + reservePiManagedNames(skillNames, trustedGlobalSyncNameMaps?.skills) + reservePiManagedNames(skillNames, trustedGlobalSyncNameMaps?.agents) + const previousPromptMap = canUseTrustedNameMaps(trustInfo, "sync") + ? previousState?.sync.nameMaps.prompts ?? {} + : {} + const previousSkillMap = canUseTrustedNameMaps(trustInfo, "sync") + ? previousState?.sync.nameMaps.skills ?? 
{} + : {} + for (const command of commands) { + const targetName = reservePreviousOrUniqueName(command.name, previousPromptMap, promptNames) + promptMap[command.name] = targetName + promptMap[`claude-home:${command.name}`] = targetName + } + + const syncSkillMap: Record = {} + for (const skill of syncableSkills) { + const targetName = reservePreviousOrUniqueName(skill.name, previousSkillMap, skillNames) + localSkillMap[skill.name] = targetName + localSkillMap[`claude-home:${skill.name}`] = targetName + syncSkillMap[skill.name] = targetName + } + + const trustedInstallLayers = mergePiNameMaps(trustedGlobalInstallNameMaps, trustedLocalInstallNameMaps) + const trustedBaseNameMaps = mergePiNameMaps(trustedInstallLayers, filterQualifiedPiNameMaps(trustedGlobalSyncNameMaps)) + + const publicationSnapshots = await captureSyncPublicationSnapshots(layout) + + const aggregatedSkillResults = new Map() + const aggregatedPromptResults = new Map() + const currentRunArtifacts = new Map() + const finalWarnings = new Map() + try { + let activeCommands = commands + let activeSkills = syncableSkills + let activePromptMap = promptMap + let activeSkillMap = syncSkillMap + let passNumber = 0 + + while (true) { + passNumber += 1 + await piSyncPassHookForTests?.({ + passNumber, + activeCommandNames: activeCommands.map((command) => command.name), + activeSkillNames: activeSkills.map((skill) => skill.name), + }) + const sameRunSkillMap = { + ...activeSkillMap, + ...buildPiSameRunQualifiedNameMap(activeSkillMap), + } + const commandNameMaps = mergePiNameMaps(trustedBaseNameMaps, { skills: sameRunSkillMap, prompts: activePromptMap }) + const skillNameMaps = mergePiNameMaps(trustedBaseNameMaps, { prompts: activePromptMap, skills: sameRunSkillMap }) + + const skillResults = await syncPiSkills(activeSkills, layout.skillsDir, activeSkillMap, skillNameMaps, { + onBeforeMutate: async (_skillName, targetPath, mode) => { + if (mode === "incremental") return + await 
rememberSyncPublicationSnapshot(publicationSnapshots, targetPath) + }, + }) + const promptResults = await syncPiCommands( + { ...config, commands: activeCommands }, + outputRoot, + commandNameMaps, + { + onBeforeMutate: async (targetPath) => { + await rememberSyncPublicationSnapshot(publicationSnapshots, targetPath) + }, + }, + ) + + const stableSkillResults = stabilizeSameRunQualifiedDependencies(skillResults, activeSkillMap, activePromptMap) + const stablePromptResults = stabilizeSameRunQualifiedDependencies(promptResults, activeSkillMap, activePromptMap) + + for (const result of stableSkillResults) { + aggregatedSkillResults.set(result.sourceName, result) + } + for (const result of stablePromptResults) { + aggregatedPromptResults.set(result.sourceName, result) + } + + const publishedSkills = stableSkillResults.filter(isPublishedSkillResult) + const publishedPrompts = stablePromptResults.filter(isPublishedPromptResult).map((result) => result.artifact) + const materializedSkillArtifacts = publishedSkills.map((skill) => + createManagedArtifact(layout, "synced-skill", skill.sourceName, skill.emittedName)) + for (const artifact of [...publishedPrompts, ...materializedSkillArtifacts]) { + currentRunArtifacts.set(`${artifact.kind}:${artifact.relativePath}`, artifact) + } + + const aggregatePublishedPromptMap = filterPublishedPromptMap( + activePromptMap, + [...aggregatedPromptResults.values()].filter(isPublishedPromptResult).map((result) => result.artifact), + ) + const aggregatePublishedSkillMap = filterPublishedSkillMap( + activeSkillMap, + [...aggregatedSkillResults.values()].filter(isPublishedSkillResult), + ) + + const retryablePromptNames = new Set(stablePromptResults.filter((result) => result.status === "retryable").map((result) => result.sourceName)) + const retryableSkillNames = new Set(stableSkillResults.filter((result) => result.status === "retryable").map((result) => result.sourceName)) + const nextActivePromptMap = filterActivePromptMap(activePromptMap, 
aggregatePublishedPromptMap, retryablePromptNames) + const nextActiveSkillMap = filterActiveSkillMap(activeSkillMap, aggregatePublishedSkillMap, retryableSkillNames) + + for (const result of [...stablePromptResults, ...stableSkillResults]) { + const key = `${result.sourceName}:${result.emittedName}` + if (result.status === "published") { + finalWarnings.delete(key) + } else if (result.warning) { + finalWarnings.set(key, result.warning) + } + } + + if (samePiNameMapEntries(activePromptMap, nextActivePromptMap) && samePiNameMapEntries(activeSkillMap, nextActiveSkillMap)) { + break + } + + activePromptMap = nextActivePromptMap + activeSkillMap = nextActiveSkillMap + if (piSyncRerunModeForTests === "full") { + activeCommands = activeCommands.filter((command) => Boolean(activePromptMap[command.name])) + activeSkills = activeSkills.filter((skill) => Boolean(activeSkillMap[skill.name])) + continue + } + + activeCommands = commands.filter((command) => retryablePromptNames.has(command.name) && Boolean(activePromptMap[command.name])) + activeSkills = syncableSkills.filter((skill) => retryableSkillNames.has(skill.name) && Boolean(activeSkillMap[skill.name])) + } + } catch (error) { + await restoreSyncPublicationSnapshots(publicationSnapshots) + throw error + } + const skillResults = [...aggregatedSkillResults.values()] + const promptResults = [...aggregatedPromptResults.values()] + const publishedSkills = skillResults.filter(isPublishedSkillResult) + const prompts = promptResults.filter(isPublishedPromptResult).map((result) => result.artifact) + const materializedSkillNames = new Set(publishedSkills.map((skill) => skill.sourceName)) + + const nextSyncArtifacts = [ + ...publishedSkills.map((skill) => createManagedArtifact(layout, "synced-skill", skill.sourceName, skill.emittedName)), + ...prompts, + ] + const verifiedPreviousSyncArtifacts = syncCleanupVerified ? previousState?.sync.artifacts ?? 
[] : [] + const legacySyncCandidates = [ + ...collectLegacyArtifactCandidates(layout, nextSyncArtifacts), + ...await collectLegacySkillDirectoryCandidates(layout, nextSyncArtifacts, verifiedPreviousSyncArtifacts), + ] + const verifiedInstallKeepsCompat = Boolean(canUseVerifiedCleanup(trustInfo, "install") && previousState?.install.sharedResources.compatExtension === true) + const verifiedSections = { + install: canUseVerifiedCleanup(trustInfo, "install"), + sync: syncCleanupVerified, + } + const installKeepsCompat = verifiedInstallKeepsCompat + const incomingSyncMcpServers = Object.keys(config.mcpServers) + const convertedSyncMcpServers = convertMcpToMcporter(config.mcpServers) + const emittedSyncMcpServerNames = Object.keys(convertedSyncMcpServers.mcpServers).sort() + const preserveUnverifiedMalformedMcporter = incomingSyncMcpServers.length > 0 + && !syncCleanupVerified + && await inspectJsonObjectState(layout.mcporterConfigPath) === "invalid" + const legacyBootstrap = await deriveLegacySyncBootstrap({ + layout, + trustInfo, + installArtifacts: canUseVerifiedCleanup(trustInfo, "install") ? previousState?.install.artifacts ?? [] : [], + ignoredLegacyPaths: new Set(legacySyncCandidates.map((candidate) => path.resolve(candidate.path))), + nextSyncArtifacts, + nextSyncMcpServers: incomingSyncMcpServers, + nextNeedsCompatExtension: needsPiCompatExtension(config), + installKeepsCompat, + }) + const publishedPromptMap = filterPublishedPromptMap(promptMap, prompts) + const publishedSkillMap = filterPublishedSkillMap(localSkillMap, publishedSkills) + + const nextState = replacePiManagedSection(previousState, "sync", createPiManagedSection({ + nameMaps: { + skills: publishedSkillMap, + prompts: publishedPromptMap, + }, + artifacts: nextSyncArtifacts, + mcpServers: preserveUnverifiedMalformedMcporter ? 
[] : emittedSyncMcpServerNames, + sharedResources: { + compatExtension: materializedSkillNames.size > 0 || prompts.length > 0 || emittedSyncMcpServerNames.length > 0, + mcporterConfig: preserveUnverifiedMalformedMcporter ? false : emittedSyncMcpServerNames.length > 0, + }, + }), previousState?.pluginName) + nextState.policyFingerprint = policyFingerprint + const nextCompatContract = derivePiSharedResourceContract({ + nextOwns: nextState.sync.sharedResources.compatExtension, + otherVerifiedOwner: verifiedInstallKeepsCompat, + preserveUntrusted: legacyBootstrap.preserveCompatExtension, + }) + const removeTrackedFileIfExists = async (filePath: string): Promise => { + await rememberSyncPublicationSnapshot(publicationSnapshots, filePath) + await removeFileIfExists(filePath) + } + const removeTrackedSkillDirectoryIfExists = async (dirPath: string): Promise => { + await rememberSyncPublicationSnapshot(publicationSnapshots, dirPath) + await removePiManagedSkillDirectory(dirPath) + } + + const retainedArtifactPaths = new Set() + for (const artifact of nextSyncArtifacts) { + const artifactPath = resolveManagedArtifactPath(layout, artifact) + if (artifactPath) retainedArtifactPaths.add(path.resolve(artifactPath)) + } + + const staleCurrentRunArtifacts = [...currentRunArtifacts.values()].filter((artifact) => { + const artifactPath = resolveManagedArtifactPath(layout, artifact) + return artifactPath ? 
!retainedArtifactPaths.has(path.resolve(artifactPath)) : false + }) + + for (const warning of legacyBootstrap.warnings) { + console.warn(warning) + } + for (const warning of finalWarnings.values()) { + console.warn(warning) + } + + try { + for (const artifact of staleCurrentRunArtifacts) { + const artifactPath = resolveManagedArtifactPath(layout, artifact) + if (!artifactPath) continue + if (artifact.kind === "prompt") { + await removeTrackedFileIfExists(artifactPath) + } else { + await removeTrackedSkillDirectoryIfExists(artifactPath) + } + } + + if (nextCompatContract.retain) { + await ensureManagedDir(layout.extensionsDir) + const compatPath = path.join(layout.extensionsDir, PI_COMPAT_EXTENSION_NAME) + if (nextCompatContract.state === "active") { + const existingCompat = await readText(compatPath).catch(() => null) + const nextCompat = PI_COMPAT_EXTENSION_SOURCE + "\n" + try { + if (existingCompat !== nextCompat) { + await rememberSyncPublicationSnapshot(publicationSnapshots, compatPath) + } + await writeTextIfChanged(compatPath, nextCompat, { existingContent: existingCompat }) + } catch (error) { + throw error + } + } + } else { + await rememberSyncPublicationSnapshot(publicationSnapshots, path.join(layout.extensionsDir, PI_COMPAT_EXTENSION_NAME)) + await removeFileIfExists(path.join(layout.extensionsDir, PI_COMPAT_EXTENSION_NAME)) + } + + const agentsBefore = await readText(layout.agentsPath).catch(() => null) + const shouldAdvertiseCompatTools = nextCompatContract.advertise + const agentsBlock = buildPiAgentsBlock(shouldAdvertiseCompatTools) + const nextAgents = agentsBefore === null ? 
agentsBlock + "\n" : upsertBlock(agentsBefore, agentsBlock) + if (nextAgents !== agentsBefore) { + await rememberSyncPublicationSnapshot(publicationSnapshots, layout.agentsPath) + } + await ensurePiAgentsBlock(layout.agentsPath, shouldAdvertiseCompatTools) + + if (incomingSyncMcpServers.length > 0) { + await ensureManagedDir(path.dirname(layout.mcporterConfigPath)) + if (preserveUnverifiedMalformedMcporter) { + console.warn(`Warning: found malformed legacy mcporter.json at ${layout.mcporterConfigPath}; leaving it untouched because sync ownership cannot be proven.`) + } else { + await rememberSyncPublicationSnapshot(publicationSnapshots, layout.mcporterConfigPath) + await mergeJsonConfigAtKey({ + configPath: layout.mcporterConfigPath, + key: "mcpServers", + incoming: convertedSyncMcpServers.mcpServers, + replaceKeys: syncCleanupVerified ? previousState?.sync.mcpServers ?? [] : [], + snapshotOnWrite: false, + }) + } + } else if (syncCleanupVerified && (previousState?.sync.mcpServers.length ?? 0) > 0) { + await rememberSyncPublicationSnapshot(publicationSnapshots, layout.mcporterConfigPath) + const result = await mergeJsonConfigAtKey({ + configPath: layout.mcporterConfigPath, + key: "mcpServers", + incoming: {}, + replaceKeys: previousState?.sync.mcpServers ?? 
[], + snapshotOnWrite: false, + }) + + if (result.didWrite && result.isEmpty) { + await removeFileIfExists(layout.mcporterConfigPath) + } + } + + await removeStaleManagedArtifacts( + layout, + filterPiManagedStateForVerifiedSections(previousState, { sync: canUseVerifiedCleanup(trustInfo, "sync") }), + nextState, + removeTrackedFileIfExists, + removeTrackedSkillDirectoryIfExists, + ) + if (syncCleanupVerified) { + await removeLegacyArtifactCandidates(legacySyncCandidates, removeTrackedFileIfExists, removeTrackedSkillDirectoryIfExists) + } else { + await warnAboutUnverifiedLegacyArtifactCandidates(legacySyncCandidates) + } + + const didWriteManagedState = await writePiManagedState(layout, nextState, { + install: canUseVerifiedCleanup(trustInfo, "install"), + sync: true, }) + if (didWriteManagedState) { + await rememberSyncPublicationSnapshot(publicationSnapshots, layout.managedManifestPath) + await rememberSyncPublicationSnapshot(publicationSnapshots, layout.verificationPath) + } + } catch (error) { + await restoreSyncPublicationSnapshots(publicationSnapshots) + throw error + } + if (publicationSnapshots.snapshotRoot) { + await fs.rm(publicationSnapshots.snapshotRoot, { recursive: true, force: true }).catch(() => undefined) + } +} + +async function captureSyncPublicationSnapshots( + layout: ReturnType, +): Promise { + await ensureManagedDir(layout.root) + return { snapshotRoot: null, snapshots: new Map() } +} + +async function rememberSyncPublicationSnapshot( + rollback: SyncPublicationSnapshots, + targetPath: string, +): Promise { + if (rollback.snapshots.has(targetPath)) return + if (!rollback.snapshotRoot) { + await ensureManagedDir(path.dirname(targetPath)) + rollback.snapshotRoot = await fs.mkdtemp(path.join(path.dirname(targetPath), ".pi-sync-rollback-")) + } + rollback.snapshots.set(targetPath, await captureManagedPathSnapshot(targetPath, rollback.snapshotRoot)) +} + +async function restoreSyncPublicationSnapshots(rollback: SyncPublicationSnapshots): Promise { 
+ for (const snapshot of [...rollback.snapshots.values()].reverse()) { + await restoreManagedPathSnapshot(snapshot) + } + if (rollback.snapshotRoot) { + await fs.rm(rollback.snapshotRoot, { recursive: true, force: true }).catch(() => undefined) + } +} + +async function inspectJsonObjectState(configPath: string): Promise { + if (!(await pathExists(configPath))) { + return "missing" + } + + try { + const parsed = await readJson(configPath) + if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) { + return "valid" + } + } catch { + return "invalid" + } + + return "invalid" +} + +function needsPiCompatExtension(config: ClaudeHomeConfig): boolean { + return config.skills.length > 0 || (config.commands?.length ?? 0) > 0 || Object.keys(config.mcpServers).length > 0 +} + +async function resolveGlobalFallbackPolicy(currentRoot: string): Promise { + return { + install: await shouldAllowGlobalFallbackForSection(currentRoot, "install"), + sync: await shouldAllowGlobalFallbackForSection(currentRoot, "sync"), + } +} + +async function shouldAllowGlobalFallbackForSection(currentRoot: string, sectionName: "install" | "sync"): Promise { + for (const candidateRoot of walkUpPaths(path.resolve(currentRoot))) { + const directManifestPath = path.join(candidateRoot, "compound-engineering", "compound-engineering-managed.json") + const nestedInstallManifestPath = path.join(candidateRoot, ".pi", "compound-engineering", "compound-engineering-managed.json") + + const hasDirectManifest = await pathExists(directManifestPath) + const hasNestedManifest = await pathExists(nestedInstallManifestPath) + if (!hasDirectManifest && !hasNestedManifest) { + continue + } + + const candidates = await Promise.all([ + hasDirectManifest ? getPiManagedTrustInfo(resolvePiLayout(candidateRoot, "sync")) : Promise.resolve(null), + hasNestedManifest ? 
getPiManagedTrustInfo(resolvePiLayout(candidateRoot, "install")) : Promise.resolve(null), + ]) + + return !candidates.some((candidate) => hasPiManagedSectionData(candidate?.state?.[sectionName])) } + + return true +} + +function walkUpPaths(start: string): string[] { + const paths: string[] = [] + let current = start + + while (true) { + paths.push(current) + const parent = path.dirname(current) + if (parent === current) break + current = parent + } + + return paths +} + +function hasPiManagedSectionData(section: Awaited>["state"] extends infer T + ? T extends { install: infer S; sync: infer S } ? S | undefined : never + : never): boolean { + if (!section) return false + return Boolean( + Object.keys(section.nameMaps?.agents ?? {}).length + || Object.keys(section.nameMaps?.skills ?? {}).length + || Object.keys(section.nameMaps?.prompts ?? {}).length + || section.artifacts.length > 0 + || section.mcpServers.length > 0 + || section.sharedResources.compatExtension + || section.sharedResources.mcporterConfig, + ) +} + +function filterPublishedPromptMap(promptMap: Record, prompts: PiManagedArtifact[]): Record { + const publishedPromptNames = new Set(prompts.map((prompt) => prompt.sourceName ?? 
prompt.emittedName)) + return Object.fromEntries( + Object.entries(promptMap).filter(([sourceName]) => { + if (sourceName.startsWith("claude-home:")) { + return publishedPromptNames.has(sourceName.slice("claude-home:".length)) + } + return publishedPromptNames.has(sourceName) + }), + ) +} + +function filterPublishedSkillMap(skillMap: Record, materializedSkills: SyncPiSkillResult[]): Record { + const publishedSkillNames = new Set(materializedSkills.map((skill) => skill.sourceName)) + return Object.fromEntries( + Object.entries(skillMap).filter(([sourceName]) => { + if (sourceName.startsWith("claude-home:")) { + return publishedSkillNames.has(sourceName.slice("claude-home:".length)) + } + return publishedSkillNames.has(sourceName) + }), + ) +} + +function filterActivePromptMap( + activePromptMap: Record, + publishedPromptMap: Record, + retryablePromptNames: Set, +): Record { + return Object.fromEntries( + Object.entries(activePromptMap).filter(([sourceName, emittedName]) => { + if (publishedPromptMap[sourceName] === emittedName) return true + const baseSource = sourceName.startsWith("claude-home:") + ? sourceName.slice("claude-home:".length) + : sourceName + return retryablePromptNames.has(baseSource) + }), + ) +} + +function filterActiveSkillMap( + activeSkillMap: Record, + publishedSkillMap: Record, + retryableSkillNames: Set, +): Record { + return Object.fromEntries( + Object.entries(activeSkillMap).filter(([sourceName, emittedName]) => { + if (publishedSkillMap[sourceName] === emittedName) return true + const baseSource = sourceName.startsWith("claude-home:") + ? 
sourceName.slice("claude-home:".length) + : sourceName + return retryableSkillNames.has(baseSource) + }), + ) +} + +function isPublishedPromptResult(result: SyncPiCommandResult): result is SyncPiCommandResult & { status: "published"; artifact: PiManagedArtifact } { + return result.status === "published" && Boolean(result.artifact) +} + +function isPublishedSkillResult(result: SyncPiSkillResult): result is SyncPiSkillResult & { status: "published" } { + return result.status === "published" +} + +function stabilizeSameRunQualifiedDependencies( + results: T[], + activeSkillMap: Record, + activePromptMap: Record, +): T[] { + const publishedSkills = new Set(results.filter((result) => result.status === "published").map((result) => result.sourceName)) + const publishedPrompts = new Set(results.filter((result) => result.status === "published").map((result) => result.sourceName)) + + return results.map((result) => { + if (result.status !== "published") return result + + const blockedSkillDependency = (result.sameRunDependencies?.skills ?? []).some((dependency) => + Boolean(activeSkillMap[dependency]) && !publishedSkills.has(dependency)) + const blockedPromptDependency = (result.sameRunDependencies?.prompts ?? 
[]).some((dependency) => + Boolean(activePromptMap[dependency]) && !publishedPrompts.has(dependency)) + + if (!blockedSkillDependency && !blockedPromptDependency) { + return result + } + + return { + ...result, + status: "retryable", + warning: undefined, + } + }) +} + +async function collectLegacySkillDirectoryCandidates( + layout: ReturnType, + nextSyncArtifacts: PiManagedArtifact[], + previousSyncArtifacts: PiManagedArtifact[], +): Promise> { + if (!(await pathExists(layout.skillsDir))) { + return [] + } + + const retainedPaths = new Set() + for (const artifact of nextSyncArtifacts) { + const artifactPath = resolveManagedArtifactPath(layout, artifact) + if (artifactPath) retainedPaths.add(path.resolve(artifactPath)) + } + + const legacyNames = new Set() + for (const artifact of [...previousSyncArtifacts, ...nextSyncArtifacts]) { + if (artifact.kind !== "synced-skill") continue + legacyNames.add(artifact.emittedName) + legacyNames.add(sanitizePathName(artifact.sourceName)) + } + + const candidates: Array<{ expectedKind: "directory"; path: string }> = [] + for (const entry of await fs.readdir(layout.skillsDir, { withFileTypes: true })) { + if (!legacyNames.has(entry.name)) continue + const candidatePath = path.resolve(path.join(layout.skillsDir, entry.name)) + if (retainedPaths.has(candidatePath)) continue + candidates.push({ expectedKind: "directory", path: candidatePath }) + } + + return candidates +} + +function samePiNameMapEntries(left: Record, right: Record): boolean { + const leftEntries = Object.entries(left).sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey)) + const rightEntries = Object.entries(right).sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey)) + return JSON.stringify(leftEntries) === JSON.stringify(rightEntries) +} + +function filterQualifiedPiNameMaps(nameMaps?: PiNameMaps): PiNameMaps | undefined { + if (!nameMaps) return undefined + + return { + agents: Object.fromEntries(Object.entries(nameMaps.agents ?? 
{}).filter(([key]) => key.includes(":"))), + skills: Object.fromEntries(Object.entries(nameMaps.skills ?? {}).filter(([key]) => key.includes(":"))), + prompts: Object.fromEntries(Object.entries(nameMaps.prompts ?? {}).filter(([key]) => key.includes(":"))), + } +} + +async function loadTrustedGlobalSyncNameMaps(currentRoot: string): Promise { + const globalRoot = path.join(resolveUserHome(), ".pi", "agent") + if (path.resolve(globalRoot) === path.resolve(currentRoot)) { + return undefined + } + + const globalTrust = await getPiManagedTrustInfo(resolvePiLayout(globalRoot, "sync")) + if (!canUseTrustedNameMaps(globalTrust, "sync")) { + return undefined + } + + return globalTrust.state?.sync.nameMaps +} + +async function loadTrustedLocalInstallNameMaps( + currentRoot: string, + currentTrust: Awaited>, +): Promise { + const nestedInstallLayout = resolvePiLayout(currentRoot, "install") + if (!path.resolve(nestedInstallLayout.root).startsWith(path.resolve(currentRoot))) { + return canUseTrustedNameMaps(currentTrust, "install") ? currentTrust.state?.install.nameMaps : undefined + } + + const nestedTrust = await getPiManagedTrustInfo(nestedInstallLayout) + if (canUseTrustedNameMaps(nestedTrust, "install")) { + return nestedTrust.state?.install.nameMaps + } + + return canUseTrustedNameMaps(currentTrust, "install") ? 
currentTrust.state?.install.nameMaps : undefined
+}
+
+async function loadTrustedGlobalInstallNameMaps(currentRoot: string): Promise {
+  const globalRoot = path.join(resolveUserHome(), ".pi", "agent")
+  if (path.resolve(globalRoot) === path.resolve(currentRoot)) {
+    return undefined
+  }
+
+  const globalTrust = await getPiManagedTrustInfo(resolvePiLayout(globalRoot, "install"))
+  if (!canUseTrustedNameMaps(globalTrust, "install")) {
+    return undefined
+  }
+
+  return globalTrust.state?.install.nameMaps
+}
+
+async function deriveLegacySyncBootstrap(options: {
+  layout: ReturnType
+  trustInfo: Awaited>
+  installArtifacts: ReturnType["artifacts"]
+  ignoredLegacyPaths: Set
+  nextSyncArtifacts: ReturnType["artifacts"]
+  nextSyncMcpServers: string[]
+  nextNeedsCompatExtension: boolean
+  installKeepsCompat: boolean
+}): Promise {
+  if (options.trustInfo.verifiedSections.sync) {
+    return { preserveCompatExtension: false, warnings: [] }
+  }
+
+  const retainedPaths = new Set()
+  for (const artifact of [...options.installArtifacts, ...options.nextSyncArtifacts]) {
+    const artifactPath = resolveManagedArtifactPath(options.layout, artifact)
+    if (artifactPath) retainedPaths.add(path.resolve(artifactPath))
+  }
+
+  const warnings: string[] = []
+  const compatPath = path.join(options.layout.extensionsDir, PI_COMPAT_EXTENSION_NAME)
+  const compatExists = await pathExists(compatPath)
+  const preserveCompatExtension = compatExists && !options.nextNeedsCompatExtension && !options.installKeepsCompat
+
+  if (preserveCompatExtension) {
+    warnings.push(`Warning: found ambiguous legacy compat extension at ${compatPath}; leaving it in place because sync ownership cannot be proven.`)
+  }
+
+  if (await pathExists(options.layout.promptsDir)) {
+    const promptEntries = await fs.readdir(options.layout.promptsDir, { withFileTypes: true })
+    for (const entry of promptEntries) {
+      if (!entry.isFile() || !entry.name.endsWith(".md")) continue
+      const promptPath = 
path.resolve(path.join(options.layout.promptsDir, entry.name)) + if (retainedPaths.has(promptPath)) continue + if (options.ignoredLegacyPaths.has(promptPath)) continue + warnings.push(`Warning: found ambiguous legacy Pi sync artifact at ${promptPath}; leaving it in place because ownership cannot be proven.`) + } + } + + if (await pathExists(options.layout.mcporterConfigPath)) { + try { + const mcporter = await readJson<{ mcpServers?: Record }>(options.layout.mcporterConfigPath) + const legacyServers = Object.keys(mcporter.mcpServers ?? {}) + if (legacyServers.length > 0 && options.nextSyncMcpServers.length === 0) { + warnings.push(`Warning: found ambiguous legacy mcporter.json at ${options.layout.mcporterConfigPath}; leaving existing MCP servers in place because ownership cannot be proven.`) + } + } catch { + warnings.push(`Warning: found malformed legacy mcporter.json at ${options.layout.mcporterConfigPath}; leaving it untouched because ownership cannot be proven.`) + } + } + + return { + preserveCompatExtension, + warnings, + } +} + +async function warnAboutUnverifiedLegacyArtifactCandidates(candidates: Array<{ path: string }>): Promise { + const warned = new Set() + + for (const candidate of candidates) { + const resolvedPath = path.resolve(candidate.path) + if (warned.has(resolvedPath)) continue + warned.add(resolvedPath) + + try { + await fs.lstat(resolvedPath) + console.warn(`Warning: found ambiguous legacy Pi sync artifact at ${resolvedPath}; leaving it in place because ownership cannot be proven.`) + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") continue + throw error + } + } +} + +function reservePreviousOrUniqueName( + sourceName: string, + previousMap: Record, + usedNames: Set, +): string { + const previousName = previousMap[sourceName] ?? 
previousMap[`claude-home:${sourceName}`] + if (previousName && !usedNames.has(previousName)) { + usedNames.add(previousName) + return previousName + } + + return uniquePiSkillName(normalizePiSkillName(sourceName), usedNames) +} + +function reservePiManagedNames(usedNames: Set, nameMap?: Record): void { + for (const value of Object.values(nameMap ?? {})) { + usedNames.add(value) + } +} + +function isValidMappedSkill(skillName: string, skillMap: Record): boolean { + return Boolean(skillMap[skillName]) +} + +async function removePiManagedSkillDirectory(dirPath: string): Promise { + await removeManagedPathIfExists(dirPath) } function convertMcpToMcporter(servers: Record): McporterConfig { diff --git a/src/targets/pi.ts b/src/targets/pi.ts index 3fe0c6c7a..5563f8911 100644 --- a/src/targets/pi.ts +++ b/src/targets/pi.ts @@ -1,121 +1,457 @@ +import { promises as fs } from "fs" import path from "path" import { + assertPathWithinRoot, backupFile, - copySkillDir, + captureManagedPathSnapshot, + captureTextFileSnapshot, ensureDir, + ensureManagedDir, + ManagedPathSnapshot, + removeFileIfExists, + removeManagedPathIfExists, pathExists, readText, - sanitizePathName, - writeJson, - writeText, + restoreManagedPathSnapshot, + restoreTextFileSnapshot, + assertSafePathComponent, + sanitizeSafePathName, + writeTextIfChanged, } from "../utils/files" -import { transformContentForPi } from "../converters/claude-to-pi" +import { mergeJsonConfigAtKey } from "../sync/json-config" +import { getPiPolicyFingerprint } from "../utils/pi-policy" +import { copySkillDirForPi } from "../utils/pi-skills" import type { PiBundle } from "../types/pi" +import { resolvePiLayout, samePiPath } from "../utils/pi-layout" +import { derivePiSharedResourceContract } from "../utils/pi-trust-contract" +import { + canUseVerifiedCleanup, + collectLegacyArtifactCandidates, + createManagedArtifact, + createPiManagedSection, + filterPiManagedStateForVerifiedSections, + getPiManagedTrustInfo, + 
planLegacyCustomRootInstallCleanup, + removeLegacyArtifactCandidates, + removeStaleManagedArtifacts, + replacePiManagedSection, + shouldWritePiManagedState, + writePiManagedState, +} from "../utils/pi-managed" -const PI_AGENTS_BLOCK_START = "" -const PI_AGENTS_BLOCK_END = "" - -const PI_AGENTS_BLOCK_BODY = `## Compound Engineering (Pi compatibility) +export const PI_AGENTS_BLOCK_START = "" +export const PI_AGENTS_BLOCK_END = "" +export const PI_AGENTS_BLOCK_BODY = `## Compound Engineering (Pi compatibility) This block is managed by compound-plugin. Compatibility notes: -- Claude Task(agent, args) maps to the subagent extension tool -- For parallel agent runs, batch multiple subagent calls with multi_tool_use.parallel +- Claude Task(agent, args) maps to the ce_subagent extension tool +- Use ce_subagent for Compound Engineering workflows even when another extension also provides a generic subagent tool + - ce_subagent cwd must stay within the active workspace root; external cwd overrides are rejected +- Use ce_run_prompt to execute verified Pi prompts by alias +- Only compound-engineering:* and claude-home:* qualified Task refs are executable in Pi by default; foreign qualified Task refs remain rejected unless the compat runtime explicitly verifies a dispatchable namespace +- Use ce_list_capabilities to inspect the current verified Pi skills, prompts, and aliases available in this workspace - AskUserQuestion maps to the ask_user_question extension tool - MCP access uses MCPorter via mcporter_list and mcporter_call extension tools -- MCPorter config path: .pi/compound-engineering/mcporter.json (project) or ~/.pi/agent/compound-engineering/mcporter.json (global) +- MCPorter config path: compound-engineering/mcporter.json (project sync), .pi/compound-engineering/mcporter.json (project install), ~/.pi/agent/compound-engineering/mcporter.json (global), or the bundled packaged fallback when that layer is the verified authority +- MCPorter configPath overrides are ignored; 
Compound Engineering resolves the verified config automatically ` -export async function writePiBundle(outputRoot: string, bundle: PiBundle): Promise { - const paths = resolvePiPaths(outputRoot) +const PI_AGENTS_BLOCK_DISABLED_BODY = `## Compound Engineering (Pi compatibility) - await ensureDir(paths.skillsDir) - await ensureDir(paths.promptsDir) - await ensureDir(paths.extensionsDir) +This block is managed by compound-plugin. - for (const prompt of bundle.prompts) { - await writeText(path.join(paths.promptsDir, `${sanitizePathName(prompt.name)}.md`), prompt.content + "\n") - } +Compatibility notes: +- Compound Engineering compat tools are not currently installed at this root. +- Local compat tools should not be advertised from this root until the compat extension is present again. +- Verified global or bundled Compound Engineering fallbacks may still exist; use ce_list_capabilities to inspect the actual callable runtime surface. +` - for (const skill of bundle.skillDirs) { - await copySkillDir(skill.sourceDir, path.join(paths.skillsDir, sanitizePathName(skill.name)), transformContentForPi) - } +type PiManagedPublicationSnapshots = { + snapshotRoot: string | null + snapshots: Map +} + +export async function writePiBundle(outputRoot: string, bundle: PiBundle): Promise { + const policyFingerprint = getPiPolicyFingerprint() + const paths = resolvePiLayout(outputRoot, "install") + const prompts = bundle.prompts.map((prompt) => ({ + ...prompt, + emittedName: sanitizeSafePathName(prompt.name, "prompt name"), + })) + const generatedSkills = bundle.generatedSkills.map((skill) => ({ + ...skill, + emittedName: sanitizeSafePathName(skill.name, "generated skill name"), + })) + const skillDirs = bundle.skillDirs.map((skill) => ({ + ...skill, + emittedName: sanitizeSafePathName(skill.name, "skill name"), + })) + const extensions = bundle.extensions.map((extension) => ({ + ...extension, + emittedName: assertSafePathComponent(extension.name, "extension name"), + })) - for (const 
skill of bundle.generatedSkills) { - await writeText(path.join(paths.skillsDir, sanitizePathName(skill.name), "SKILL.md"), skill.content + "\n") + for (const prompt of prompts) { + assertPathWithinRoot(path.join(paths.promptsDir, `${prompt.emittedName}.md`), paths.root, "Pi prompt path") + } + for (const skill of [...generatedSkills, ...skillDirs]) { + assertPathWithinRoot(path.join(paths.skillsDir, skill.emittedName), paths.root, "Pi skill path") } + for (const extension of extensions) { + assertPathWithinRoot(path.join(paths.extensionsDir, extension.emittedName), paths.root, "Pi extension path") + } + + const legacyLayout = !samePiPath(paths.root, outputRoot) + ? resolvePiLayout(outputRoot, "sync") + : null + const trustInfo = await getPiManagedTrustInfo(paths) + const legacyTrustInfo = legacyLayout ? await getPiManagedTrustInfo(legacyLayout) : null + const previousState = trustInfo.state + const publicationSnapshots = await capturePiManagedPublicationSnapshots(paths) + + const installArtifacts = [ + ...prompts.map((prompt) => + createManagedArtifact(paths, "prompt", prompt.sourceName ?? prompt.name, prompt.emittedName)), + ...generatedSkills.map((skill) => + createManagedArtifact(paths, "generated-skill", skill.sourceName ?? skill.name, skill.emittedName)), + ...skillDirs.map((skill) => + createManagedArtifact(paths, "copied-skill", skill.sourceName ?? skill.name, skill.emittedName)), + ] + const legacyCustomRootCandidates = legacyLayout + ? 
collectLegacyArtifactCandidates(paths, installArtifacts, { legacyRoot: outputRoot }) + : [] + const preserveUnverifiedMalformedInstallMcporter = Boolean(bundle.mcporterConfig) + && !canUseVerifiedCleanup(trustInfo, "install") + && await inspectJsonObjectState(paths.mcporterConfigPath) === "invalid" - for (const extension of bundle.extensions) { - await writeText(path.join(paths.extensionsDir, extension.name), extension.content + "\n") + const nextState = replacePiManagedSection(previousState, "install", createPiManagedSection({ + nameMaps: bundle.nameMaps, + artifacts: installArtifacts, + mcpServers: preserveUnverifiedMalformedInstallMcporter ? [] : Object.keys(bundle.mcporterConfig?.mcpServers ?? {}), + sharedResources: { + compatExtension: bundle.extensions.length > 0, + mcporterConfig: preserveUnverifiedMalformedInstallMcporter ? false : Boolean(bundle.mcporterConfig), + }, + }), bundle.pluginName) + nextState.policyFingerprint = policyFingerprint + const syncCleanupVerified = canUseVerifiedCleanup(trustInfo, "sync") + const sharedSyncCompat = syncCleanupVerified && previousState?.sync.sharedResources.compatExtension === true + const sharedSyncMcporterConfig = syncCleanupVerified && previousState?.sync.sharedResources.mcporterConfig === true + const sharedSyncMcpServers = syncCleanupVerified ? new Set(previousState?.sync.mcpServers ?? []) : new Set() + const verifiedPreviousState = filterPiManagedStateForVerifiedSections(previousState, { + install: canUseVerifiedCleanup(trustInfo, "install"), + sync: syncCleanupVerified, + }) + const previousSkillArtifacts = verifiedPreviousState + ? [...verifiedPreviousState.install.artifacts, ...verifiedPreviousState.sync.artifacts] + : [] + const previousSkillArtifactsByName = new Map() + for (const artifact of previousSkillArtifacts) { + const key = `${artifact.kind}:${artifact.emittedName}` + const bucket = previousSkillArtifactsByName.get(key) ?? 
[] + bucket.push(artifact) + previousSkillArtifactsByName.set(key, bucket) } + const legacyCleanupPlan = legacyLayout && legacyTrustInfo + ? await planLegacyCustomRootInstallCleanup({ legacyLayout, legacyTrustInfo, artifactCandidates: legacyCustomRootCandidates }) + : null + const removeTrackedFileIfExists = async (filePath: string): Promise => { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, filePath) + await removeFileIfExists(filePath) + } + const removeTrackedSkillDirectoryIfExists = async (dirPath: string): Promise => { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, dirPath) + await removeSkillDirectoryIfExists(dirPath) + } + try { + await ensureManagedDir(paths.skillsDir) + await ensureManagedDir(paths.promptsDir) + await ensureManagedDir(paths.extensionsDir) + + const compatPath = path.join(paths.extensionsDir, "compound-engineering-compat.ts") + const preserveUntrustedCompat = !nextState.install.sharedResources.compatExtension + && !sharedSyncCompat + && !syncCleanupVerified + && await pathExists(compatPath) + const shouldPreserveAmbiguousMcporter = !syncCleanupVerified + && await pathExists(paths.mcporterConfigPath) + const installMcporterReplaceKeys = canUseVerifiedCleanup(trustInfo, "install") && !shouldPreserveAmbiguousMcporter + ? (previousState?.install.mcpServers ?? []).filter((serverName) => !sharedSyncMcpServers.has(serverName)) + : [] + + if (preserveUntrustedCompat) { + console.warn(`Warning: found ambiguous Pi shared resource at ${compatPath}; removing the live compat extension because sync ownership cannot be proven.`) + } + if (shouldPreserveAmbiguousMcporter && (previousState?.install.mcpServers.length ?? 
0) > 0) { + console.warn(`Warning: found ambiguous mcporter.json at ${paths.mcporterConfigPath}; leaving it untouched because sync ownership cannot be proven.`) + } + + for (const prompt of prompts) { + const targetPath = path.join(paths.promptsDir, `${prompt.emittedName}.md`) + const nextContent = prompt.content + "\n" + const existing = await readText(targetPath).catch(() => null) + if (existing !== nextContent) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetPath) + } + await writeTextIfChanged(targetPath, nextContent, { existingContent: existing }) + } + + for (const skill of skillDirs) { + const targetDir = path.join(paths.skillsDir, skill.emittedName) + const previousArtifact = [ + ...(previousSkillArtifactsByName.get(`generated-skill:${skill.emittedName}`) ?? []), + ...(previousSkillArtifactsByName.get(`synced-skill:${skill.emittedName}`) ?? []), + ][0] + await copySkillDirForPi( + skill.sourceDir, + targetDir, + skill.name, + bundle.nameMaps, + { trustedRoot: skill.sourceDir }, + undefined, + { + onBeforeMutate: async (mode) => { + if (mode === "replace" || previousArtifact) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetDir) + } + if (previousArtifact) { + await removeSkillDirectoryIfExists(targetDir) + } + }, + }, + ) + } + + for (const skill of generatedSkills) { + const targetDir = path.join(paths.skillsDir, skill.emittedName) + const previousArtifact = [ + ...(previousSkillArtifactsByName.get(`copied-skill:${skill.emittedName}`) ?? []), + ...(previousSkillArtifactsByName.get(`synced-skill:${skill.emittedName}`) ?? 
[]), + ][0] + if (previousArtifact) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetDir) + await removeSkillDirectoryIfExists(targetDir) + } + const targetPath = path.join(targetDir, "SKILL.md") + const nextContent = skill.content + "\n" + const existing = await readText(targetPath).catch(() => null) + if (existing !== nextContent) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetDir) + } + await writeTextIfChanged(targetPath, nextContent, { existingContent: existing }) + } + + for (const extension of extensions) { + const targetPath = path.join(paths.extensionsDir, extension.emittedName) + const nextContent = extension.content + "\n" + const existing = await readText(targetPath).catch(() => null) + if (existing !== nextContent) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetPath) + } + await writeTextIfChanged(targetPath, nextContent, { existingContent: existing }) + } - if (bundle.mcporterConfig) { - const backupPath = await backupFile(paths.mcporterConfigPath) - if (backupPath) { - console.log(`Backed up existing MCPorter config to ${backupPath}`) + if (bundle.mcporterConfig) { + await ensureManagedDir(path.dirname(paths.mcporterConfigPath)) + if (preserveUnverifiedMalformedInstallMcporter) { + console.warn(`Warning: found malformed legacy mcporter.json at ${paths.mcporterConfigPath}; leaving it untouched because install ownership cannot be proven.`) + } else { + const nextContent = JSON.stringify(bundle.mcporterConfig, null, 2) + "\n" + const existing = await readText(paths.mcporterConfigPath).catch(() => null) + if (existing !== nextContent) { + const backupPath = await backupFile(paths.mcporterConfigPath) + if (backupPath) { + console.log(`Backed up existing MCPorter config to ${backupPath}`) + } + } + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.mcporterConfigPath) + await mergeJsonConfigAtKey({ + configPath: paths.mcporterConfigPath, + key: 
"mcpServers", + incoming: bundle.mcporterConfig.mcpServers, + replaceKeys: installMcporterReplaceKeys, + snapshotOnWrite: false, + }) + } + } else if (canUseVerifiedCleanup(trustInfo, "install") && (previousState?.install.mcpServers.length ?? 0) > 0) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.mcporterConfigPath) + const result = await mergeJsonConfigAtKey({ + configPath: paths.mcporterConfigPath, + key: "mcpServers", + incoming: {}, + replaceKeys: installMcporterReplaceKeys, + snapshotOnWrite: false, + }) + + if (result.didWrite && result.isEmpty) { + await removeFileIfExists(paths.mcporterConfigPath) + } + } + + const compatContract = derivePiSharedResourceContract({ + nextOwns: nextState.install.sharedResources.compatExtension, + otherVerifiedOwner: sharedSyncCompat, + preserveUntrusted: preserveUntrustedCompat, + }) + const keepCompatExtension = compatContract.retain + if (!keepCompatExtension) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, compatPath) + await removeFileIfExists(compatPath) + } + + const agentsBefore = await readText(paths.agentsPath).catch(() => null) + const shouldAdvertiseCompatTools = compatContract.advertise + const agentsBlock = buildPiAgentsBlock(shouldAdvertiseCompatTools) + const nextAgents = agentsBefore === null ? agentsBlock + "\n" : upsertBlock(agentsBefore, agentsBlock) + if (nextAgents !== agentsBefore) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.agentsPath) + } + await ensurePiAgentsBlock(paths.agentsPath, shouldAdvertiseCompatTools) + + await removeStaleManagedArtifacts( + paths, + filterPiManagedStateForVerifiedSections(previousState, { install: canUseVerifiedCleanup(trustInfo, "install") }), + nextState, + removeTrackedFileIfExists, + removeTrackedSkillDirectoryIfExists, + ) + + for (const warning of legacyCleanupPlan?.warnings ?? 
[]) { + console.warn(warning) + } + await removeLegacyArtifactCandidates( + legacyCleanupPlan?.artifactCandidates ?? [], + removeTrackedFileIfExists, + removeTrackedSkillDirectoryIfExists, + ) + + if (legacyLayout && legacyCleanupPlan?.removeCompatExtension) { + await removeTrackedFileIfExists(path.join(legacyLayout.extensionsDir, "compound-engineering-compat.ts")) + } + + if (legacyLayout && legacyCleanupPlan && legacyCleanupPlan.pruneMcporterKeys.length > 0) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, legacyLayout.mcporterConfigPath) + const result = await mergeJsonConfigAtKey({ + configPath: legacyLayout.mcporterConfigPath, + key: "mcpServers", + incoming: {}, + replaceKeys: legacyCleanupPlan.pruneMcporterKeys, + snapshotOnWrite: false, + }) + + if (result.didWrite && result.isEmpty) { + await removeTrackedFileIfExists(legacyLayout.mcporterConfigPath) + } + } + + if (shouldWritePiManagedState(nextState)) { + const didWriteManagedState = await writePiManagedState(paths, nextState, { + install: true, + sync: canUseVerifiedCleanup(trustInfo, "sync"), + }) + if (didWriteManagedState) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.managedManifestPath) + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.verificationPath) + } + } else { + if (await pathExists(paths.managedManifestPath)) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.managedManifestPath) + } + if (await pathExists(paths.verificationPath)) { + await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.verificationPath) + } + await removeFileIfExists(paths.managedManifestPath) + await removeFileIfExists(paths.verificationPath) } - await writeJson(paths.mcporterConfigPath, bundle.mcporterConfig) + } catch (error) { + await restorePiManagedPublicationSnapshots(publicationSnapshots) + throw error } + if (publicationSnapshots.snapshotRoot) { + await fs.rm(publicationSnapshots.snapshotRoot, { 
recursive: true, force: true }).catch(() => undefined) + } +} - await ensurePiAgentsBlock(paths.agentsPath) +async function capturePiManagedPublicationSnapshots( + paths: ReturnType, +): Promise { + await ensureManagedDir(paths.root) + return { snapshotRoot: null, snapshots: new Map() } } -function resolvePiPaths(outputRoot: string) { - const base = path.basename(outputRoot) +async function rememberPiManagedPublicationSnapshot( + rollback: PiManagedPublicationSnapshots, + targetPath: string, +): Promise { + if (rollback.snapshots.has(targetPath)) return + if (!rollback.snapshotRoot) { + await ensureManagedDir(path.dirname(targetPath)) + rollback.snapshotRoot = await fs.mkdtemp(path.join(path.dirname(targetPath), ".pi-publish-rollback-")) + } + rollback.snapshots.set(targetPath, await captureManagedPathSnapshot(targetPath, rollback.snapshotRoot)) +} - // Global install root: ~/.pi/agent - if (base === "agent") { - return { - skillsDir: path.join(outputRoot, "skills"), - promptsDir: path.join(outputRoot, "prompts"), - extensionsDir: path.join(outputRoot, "extensions"), - mcporterConfigPath: path.join(outputRoot, "compound-engineering", "mcporter.json"), - agentsPath: path.join(outputRoot, "AGENTS.md"), - } +async function restorePiManagedPublicationSnapshots(rollback: PiManagedPublicationSnapshots): Promise { + for (const snapshot of [...rollback.snapshots.values()].reverse()) { + await restoreManagedPathSnapshot(snapshot) + } + if (rollback.snapshotRoot) { + await fs.rm(rollback.snapshotRoot, { recursive: true, force: true }).catch(() => undefined) + } +} + +async function removeSkillDirectoryIfExists(dirPath: string): Promise { + await removeManagedPathIfExists(dirPath) +} + +async function inspectJsonObjectState(configPath: string): Promise<"missing" | "valid" | "invalid"> { + if (!(await pathExists(configPath))) { + return "missing" } - // Project local .pi directory - if (base === ".pi") { - return { - skillsDir: path.join(outputRoot, "skills"), - promptsDir: 
path.join(outputRoot, "prompts"), - extensionsDir: path.join(outputRoot, "extensions"), - mcporterConfigPath: path.join(outputRoot, "compound-engineering", "mcporter.json"), - agentsPath: path.join(outputRoot, "AGENTS.md"), + try { + const parsed = JSON.parse(await readText(configPath)) as unknown + if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) { + return "valid" } + } catch { + return "invalid" } - // Custom output root -> nest under .pi - return { - skillsDir: path.join(outputRoot, ".pi", "skills"), - promptsDir: path.join(outputRoot, ".pi", "prompts"), - extensionsDir: path.join(outputRoot, ".pi", "extensions"), - mcporterConfigPath: path.join(outputRoot, ".pi", "compound-engineering", "mcporter.json"), - agentsPath: path.join(outputRoot, "AGENTS.md"), - } + return "invalid" } -async function ensurePiAgentsBlock(filePath: string): Promise { - const block = buildPiAgentsBlock() +export async function ensurePiAgentsBlock(filePath: string, enabled = true): Promise { + const block = buildPiAgentsBlock(enabled) if (!(await pathExists(filePath))) { - await writeText(filePath, block + "\n") + await writeTextIfChanged(filePath, block + "\n") return } - const existing = await readText(filePath) - const updated = upsertBlock(existing, block) - if (updated !== existing) { - await writeText(filePath, updated) + let snapshot: Awaited> | null = null + try { + const existing = await readText(filePath) + const updated = upsertBlock(existing, block) + if (updated !== existing) { + snapshot = await captureTextFileSnapshot(filePath) + await writeTextIfChanged(filePath, updated) + } + } catch (error) { + if (snapshot) { + await restoreTextFileSnapshot(snapshot) + } + throw error } } -function buildPiAgentsBlock(): string { - return [PI_AGENTS_BLOCK_START, PI_AGENTS_BLOCK_BODY.trim(), PI_AGENTS_BLOCK_END].join("\n") +export function buildPiAgentsBlock(enabled = true): string { + return [PI_AGENTS_BLOCK_START, (enabled ? 
PI_AGENTS_BLOCK_BODY : PI_AGENTS_BLOCK_DISABLED_BODY).trim(), PI_AGENTS_BLOCK_END].join("\n") } -function upsertBlock(existing: string, block: string): string { +export function upsertBlock(existing: string, block: string): string { const startIndex = existing.indexOf(PI_AGENTS_BLOCK_START) const endIndex = existing.indexOf(PI_AGENTS_BLOCK_END) diff --git a/src/templates/pi/compat-extension.ts b/src/templates/pi/compat-extension.ts index 8be4176fe..38eab6cc7 100644 --- a/src/templates/pi/compat-extension.ts +++ b/src/templates/pi/compat-extension.ts @@ -1,4 +1,9 @@ -export const PI_COMPAT_EXTENSION_SOURCE = `import fs from "node:fs" +import { createHash } from "crypto" + +const DEFAULT_PI_POLICY_FINGERPRINT = createHash("sha256").update("foreign-qualified-default-deny-v1").digest("hex") + +export const PI_COMPAT_EXTENSION_SOURCE = `import { createHash } from "node:crypto" +import fs from "node:fs" import os from "node:os" import path from "node:path" import { fileURLToPath } from "node:url" @@ -8,10 +13,18 @@ import { Type } from "@sinclair/typebox" const MAX_BYTES = 50 * 1024 const DEFAULT_SUBAGENT_TIMEOUT_MS = 10 * 60 * 1000 const MAX_PARALLEL_SUBAGENTS = 8 +const PI_MAX_NAME_LENGTH = 60 +const PI_POLICY_FINGERPRINT_ENV = "COMPOUND_ENGINEERING_PI_POLICY_FINGERPRINT" +const CURRENT_POLICY_FINGERPRINT = ${JSON.stringify(DEFAULT_PI_POLICY_FINGERPRINT)} + +function getCurrentPolicyFingerprint(): string { + const envOverride = process.env[PI_POLICY_FINGERPRINT_ENV]?.trim() + return envOverride || CURRENT_POLICY_FINGERPRINT +} type SubagentTask = { agent: string - task: string + task?: string cwd?: string } @@ -24,6 +37,94 @@ type SubagentResult = { stderr: string } +type PiAliasManifest = { + version?: number + nameMaps?: PiNameMaps + installPrompts?: PiLegacyArtifact[] + syncPrompts?: PiLegacyArtifact[] + generatedSkills?: PiLegacyArtifact[] + install?: PiAliasSection + sync?: PiAliasSection +} + +type PiLegacyArtifact = { + sourceName?: string + outputPath?: string 
+} + +type PiAliasSection = { + nameMaps?: PiNameMaps + artifacts?: Array<{ + kind?: string + sourceName?: string + emittedName?: string + relativePath?: string + }> + mcpServers?: string[] + sharedResources?: { + compatExtension?: boolean + mcporterConfig?: boolean + } +} + +type PiManagedVerificationRecord = { + version?: number + root?: string + manifestPath?: string + policyFingerprint?: string + install?: { + hash?: string + } + sync?: { + hash?: string + } +} + +type PiNameMaps = { + agents?: Record + skills?: Record + prompts?: Record +} + +type CachedAliasManifest = { + mtimeMs: number + size: number + manifest: PiAliasManifest | null +} + +type CachedAliasResolution = { + key: string + signatures: string + layers: ResolvedAliasLayer[] +} + +type AliasManifestSignatureHook = (filePath: string) => void | Promise + +type AliasManifestLoadResult = { + found: boolean + mtimeMs: number + size: number + manifest: PiAliasManifest | null +} + +type PiSectionName = "install" | "sync" + +type McporterAuthoritySource = "project-sync" | "project-install" | "global" | "bundled" + +type McporterCapabilityProvenance = { + status: "available" | "blocked-unverified-project-sync" | "blocked-unverified-project-install" | "bundled-unverified" | "absent" + authority: McporterAuthoritySource | null +} + +type ResolvedAliasLayer = { + searchRoot: string + manifestPath: string + manifest: PiAliasManifest + scope: "project" | "global" | "bundled" + verifiedInstall: boolean + verifiedSync: boolean +} + function truncate(value: string): string { const input = value ?? "" if (Buffer.byteLength(input, "utf8") <= MAX_BYTES) return input @@ -36,20 +137,268 @@ function shellEscape(value: string): string { } function normalizeName(value: string): string { - return String(value || "") - .trim() + const trimmed = String(value || "").trim() + if (!trimmed) return "" + + const leafName = trimmed.split(":").filter(Boolean).pop() ?? 
trimmed + + const normalized = leafName .toLowerCase() - .replace(/[^a-z0-9_-]+/g, "-") + .replace(/[\\/]+/g, "-") + .replace(/[:_\\s]+/g, "-") + .replace(/[^a-z0-9-]+/g, "-") .replace(/-+/g, "-") .replace(/^-+|-+$/g, "") + + return normalized + .slice(0, PI_MAX_NAME_LENGTH) + .replace(/-+$/g, "") } -function resolveBundledMcporterConfigPath(): string | undefined { +function isSafeManagedName(value: string): boolean { + const trimmed = String(value || "").trim() + if (!trimmed) return false + if (trimmed.length > 64) return false + return /^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(trimmed) +} + +function resolveStateHome(): string { + return process.env.COMPOUND_ENGINEERING_HOME || os.homedir() +} + +function resolveUserHome(): string { + return process.env.HOME || os.homedir() +} + +function canonicalizeManagedPath(targetPath: string): string { + const resolved = path.resolve(targetPath) + const normalized = resolved.replace(/[\\/]+$/, "") + return normalized || resolved +} + +function canonicalizeExecutionPath(targetPath: string): string { + const resolved = path.resolve(targetPath) + try { + const realpath = fs.realpathSync.native ? fs.realpathSync.native(resolved) : fs.realpathSync(resolved) + return canonicalizeManagedPath(realpath) + } catch { + return canonicalizeManagedPath(resolved) + } +} + +function resolveMachineKeyPath(): string { + return path.join(resolveStateHome(), ".compound-engineering", "pi-managed-key") +} + +function readMachineKey(): string | null { + try { + return fs.readFileSync(resolveMachineKeyPath(), "utf8").trim() || null + } catch { + return null + } +} + +function resolveManagedManifestRoot(manifestPath: string): string { + return canonicalizeManagedPath(path.dirname(path.dirname(manifestPath))) +} + +function normalizeNameMapEntries(entries?: Record): Record { + const normalized: Record = {} + + for (const [alias, emittedName] of Object.entries(entries ?? 
{})) { + if (!alias || !isSafeManagedName(emittedName)) continue + normalized[alias] = emittedName + } + + return normalized +} + +function hasNameMaps(nameMaps?: PiNameMaps): boolean { + return Boolean( + Object.keys(nameMaps?.agents ?? {}).length + || Object.keys(nameMaps?.skills ?? {}).length + || Object.keys(nameMaps?.prompts ?? {}).length, + ) +} + +function normalizeMcpServers(servers?: string[]): string[] { + return [...new Set((servers ?? []).map((server) => String(server || "").trim()).filter(Boolean))].sort() +} + +function normalizeSharedResources(resources?: { compatExtension?: boolean; mcporterConfig?: boolean }) { + return { + compatExtension: resources?.compatExtension === true, + mcporterConfig: resources?.mcporterConfig === true, + } +} + +function normalizeRelativeArtifactPath(relativePath?: string): string | null { + const trimmed = String(relativePath || "").trim() + if (!trimmed || path.isAbsolute(trimmed)) return null + + const normalized = path.normalize(trimmed) + if (normalized === ".." || normalized.startsWith(".." + path.sep)) return null + return normalized +} + +function dedupeArtifacts(artifacts?: PiAliasSection["artifacts"]): NonNullable { + const byPath = new Map[number]>() + + for (const artifact of artifacts ?? []) { + const relativePath = normalizeRelativeArtifactPath(artifact?.relativePath) + byPath.set(String(artifact?.kind ?? "") + ":" + String(relativePath ?? ""), { + kind: artifact?.kind, + sourceName: artifact?.sourceName, + emittedName: artifact?.emittedName, + relativePath: relativePath ?? "", + }) + } + + return [...byPath.values()] +} + +function hasSectionData(section?: PiAliasSection): boolean { + const sharedResources = normalizeSharedResources(section?.sharedResources) + return hasNameMaps(section?.nameMaps) + || (section?.artifacts?.length ?? 0) > 0 + || (section?.mcpServers?.length ?? 
0) > 0 + || sharedResources.compatExtension + || sharedResources.mcporterConfig +} + +function createSectionHashPayload(root: string, section?: PiAliasSection) { + return { + root: path.resolve(root), + nameMaps: { + agents: normalizeNameMapEntries(section?.nameMaps?.agents), + skills: normalizeNameMapEntries(section?.nameMaps?.skills), + prompts: normalizeNameMapEntries(section?.nameMaps?.prompts), + }, + artifacts: dedupeArtifacts(section?.artifacts).map((artifact) => ({ + kind: artifact.kind, + sourceName: artifact.sourceName, + emittedName: artifact.emittedName, + relativePath: normalizeRelativeArtifactPath(artifact.relativePath), + })), + mcpServers: normalizeMcpServers(section?.mcpServers), + sharedResources: normalizeSharedResources(section?.sharedResources), + } +} + +function resolveVerificationPath(root: string, manifestPath: string): string { + const identity = createHash("sha256") + .update(canonicalizeManagedPath(root) + ":" + canonicalizeManagedPath(manifestPath)) + .digest("hex") + return path.join(resolveStateHome(), ".compound-engineering", "pi-managed", identity + ".json") +} + +function hashManifestSection(root: string, section?: PiAliasSection): string { + const payload = JSON.stringify(createSectionHashPayload(root, section)) + + return createHash("sha256").update(payload).digest("hex") +} + +function getEffectiveSectionForVerification(root: string, manifest: PiAliasManifest, sectionName: PiSectionName): PiAliasSection | undefined { + const artifacts = normalizeLegacyArtifactsForSection(root, manifest, sectionName) + const section = manifest[sectionName] + const effectiveSection = section + ? { ...section, artifacts: dedupeArtifacts([...(section.artifacts ?? 
[]), ...artifacts]) } + : undefined + + if (effectiveSection) return effectiveSection + + const legacyNameMaps = filterLegacyNameMapsForSection(manifest.nameMaps, sectionName) + if (!hasNameMaps(legacyNameMaps) && artifacts.length === 0) return undefined + + return { nameMaps: legacyNameMaps, artifacts } +} + +function normalizeLegacyArtifactsForSection(root: string, manifest: PiAliasManifest, sectionName: PiSectionName): NonNullable { + const artifacts: NonNullable = [] + + if (sectionName === "install") { + for (const artifact of manifest.installPrompts ?? []) { + const normalized = normalizeLegacyArtifact(root, artifact, "prompt") + if (normalized) artifacts.push(normalized) + } + for (const artifact of manifest.generatedSkills ?? []) { + const normalized = normalizeLegacyArtifact(root, artifact, "generated-skill") + if (normalized) artifacts.push(normalized) + } + } + + if (sectionName === "sync") { + for (const artifact of manifest.syncPrompts ?? []) { + const normalized = normalizeLegacyArtifact(root, artifact, "prompt") + if (normalized) artifacts.push(normalized) + } + } + + return artifacts +} + +function normalizeLegacyArtifact( + root: string, + artifact: PiLegacyArtifact, + kind: NonNullable[number]["kind"], +): NonNullable[number] | null { + if (!artifact?.sourceName || !artifact?.outputPath) return null + + const absolutePath = canonicalizeManagedPath(artifact.outputPath) + const canonicalRoot = canonicalizeManagedPath(root) + if (absolutePath !== canonicalRoot && !absolutePath.startsWith(canonicalRoot + path.sep)) return null + const emittedName = kind === "prompt" + ? path.basename(absolutePath, path.extname(absolutePath)) + : path.basename(absolutePath) + + if (!isSafeManagedName(emittedName)) return null + + return { + kind, + sourceName: artifact.sourceName, + emittedName, + relativePath: normalizeRelativeArtifactPath(path.relative(canonicalRoot, absolutePath)) ?? 
"", + } +} + +function isVerifiedManifestSection(manifestPath: string, manifest: PiAliasManifest, sectionName: "install" | "sync"): boolean { + const machineKey = readMachineKey() + if (!machineKey) return false + + const root = resolveManagedManifestRoot(manifestPath) + const verificationPath = resolveVerificationPath(root, manifestPath) + + try { + const verification = JSON.parse(fs.readFileSync(verificationPath, "utf8")) as PiManagedVerificationRecord + if (verification.version !== 1) return false + if (verification.root !== canonicalizeManagedPath(root)) return false + if (verification.manifestPath !== canonicalizeManagedPath(manifestPath)) return false + const currentPolicyFingerprint = getCurrentPolicyFingerprint() + if (manifest.policyFingerprint !== currentPolicyFingerprint) return false + if (verification.policyFingerprint !== currentPolicyFingerprint) return false + + const scopedHash = verification[sectionName]?.hash + if (!scopedHash || !scopedHash.startsWith(machineKey + ":")) return false + + return scopedHash === machineKey + ":" + hashManifestSection(root, getEffectiveSectionForVerification(root, manifest, sectionName)) + } catch { + return false + } +} + +const aliasManifestCache = new Map() +const aliasResolutionCache = new Map() +let aliasManifestSignatureHook: AliasManifestSignatureHook | null = null + +export function setAliasManifestSignatureHookForTests(hook: AliasManifestSignatureHook | null): void { + aliasManifestSignatureHook = hook +} + +function resolveBundledAliasManifestPath(): string | undefined { try { const extensionDir = path.dirname(fileURLToPath(import.meta.url)) const candidates = [ - path.join(extensionDir, "..", "pi-resources", "compound-engineering", "mcporter.json"), - path.join(extensionDir, "..", "compound-engineering", "mcporter.json"), + path.join(extensionDir, "..", "pi-resources", "compound-engineering", "compound-engineering-managed.json"), ] for (const candidate of candidates) { @@ -62,49 +411,704 @@ function 
resolveBundledMcporterConfigPath(): string | undefined { return undefined } +function resolveAliasManifestPaths(cwd: string, walkedPaths = walkAliasSearchRoots(cwd)): ResolvedAliasLayer[] { + const layers: ResolvedAliasLayer[] = [] + const bundledManifestPath = resolveBundledAliasManifestPath() + const bundledRoot = bundledManifestPath + ? path.resolve(path.dirname(path.dirname(path.dirname(bundledManifestPath)))) + : null + + for (const current of walkedPaths) { + const projectPaths = [ + path.join(current, "compound-engineering", "compound-engineering-managed.json"), + path.join(current, ".pi", "compound-engineering", "compound-engineering-managed.json"), + ] + + for (const projectPath of projectPaths) { + if (!fs.existsSync(projectPath)) continue + if (bundledRoot && path.resolve(current) === bundledRoot) continue + const loaded = loadAliasManifestFromPath(projectPath) + if (loaded.manifest) { + layers.push({ + searchRoot: current, + manifestPath: projectPath, + manifest: loaded.manifest, + scope: "project", + verifiedInstall: isVerifiedManifestSection(projectPath, loaded.manifest, "install"), + verifiedSync: isVerifiedManifestSection(projectPath, loaded.manifest, "sync"), + }) + } + } + } + + const globalPath = path.join(resolveUserHome(), ".pi", "agent", "compound-engineering", "compound-engineering-managed.json") + if (fs.existsSync(globalPath)) { + const loaded = loadAliasManifestFromPath(globalPath) + if (loaded.manifest) { + layers.push({ + searchRoot: globalPath, + manifestPath: globalPath, + manifest: loaded.manifest, + scope: "global", + verifiedInstall: isVerifiedManifestSection(globalPath, loaded.manifest, "install"), + verifiedSync: isVerifiedManifestSection(globalPath, loaded.manifest, "sync"), + }) + } + } + + return layers +} + +function walkAliasSearchRoots(cwd: string): string[] { + const walked = walkUpPaths(cwd) + const searchRoots: string[] = [] + + for (const current of walked) { + searchRoots.push(current) + const hasProjectManifest = 
fs.existsSync(path.join(current, "compound-engineering", "compound-engineering-managed.json")) + || fs.existsSync(path.join(current, ".pi", "compound-engineering", "compound-engineering-managed.json")) + if (hasProjectManifest) break + } + + return searchRoots +} + +function isNestedProjectManifestPath(manifestPath: string): boolean { + return manifestPath.includes(path.sep + ".pi" + path.sep + "compound-engineering" + path.sep) +} + +function loadAliasManifest(cwd: string): PiAliasManifest | null { + return resolveResolvedAliasLayers(cwd)[0]?.manifest ?? null +} + +function loadAliasManifestFromPath(filePath: string): AliasManifestLoadResult { + if (!filePath) { + return { found: false, mtimeMs: -1, size: -1, manifest: null } + } + + try { + const stats = fs.statSync(filePath) + const cached = aliasManifestCache.get(filePath) + if (cached && cached.mtimeMs === stats.mtimeMs && cached.size === stats.size) { + return { found: true, mtimeMs: cached.mtimeMs, size: cached.size, manifest: cached.manifest } + } + + const manifest = JSON.parse(fs.readFileSync(filePath, "utf8")) as PiAliasManifest + aliasManifestCache.set(filePath, { mtimeMs: stats.mtimeMs, size: stats.size, manifest }) + return { found: true, mtimeMs: stats.mtimeMs, size: stats.size, manifest } + } catch { + aliasManifestCache.delete(filePath) + return { found: fs.existsSync(filePath), mtimeMs: -1, size: -1, manifest: null } + } +} + +function resolveResolvedAliasLayers(cwd: string): ResolvedAliasLayer[] { + const resolvedCwd = canonicalizeManagedPath(cwd) + const walkedPaths = walkAliasSearchRoots(resolvedCwd) + const cacheKey = walkedPaths.join("|") + const signatures = buildAliasResolutionSignatures(walkedPaths) + const cached = aliasResolutionCache.get(cacheKey) + if (cached?.key === cacheKey && cached.signatures === signatures) return cached.layers + + const layers = resolveAliasManifestPaths(resolvedCwd, walkedPaths) + aliasResolutionCache.set(cacheKey, { key: cacheKey, signatures, layers }) + 
return layers +} + +function buildAliasResolutionSignatures(walkedPaths: string[]): string { + const signatures: string[] = [] + signatures.push("policy:" + getCurrentPolicyFingerprint()) + + for (const current of walkedPaths) { + signatures.push(readAliasManifestSignature(path.join(current, "compound-engineering", "compound-engineering-managed.json"), true)) + signatures.push(readAliasManifestSignature(path.join(current, ".pi", "compound-engineering", "compound-engineering-managed.json"), true)) + } + + signatures.push(readAliasManifestSignature(path.join(resolveUserHome(), ".pi", "agent", "compound-engineering", "compound-engineering-managed.json"), true)) + + const bundledPath = resolveBundledAliasManifestPath() + if (bundledPath) { + signatures.push(readAliasManifestSignature(bundledPath, false)) + } + + return signatures.join("|") +} + +function readAliasManifestSignature(filePath: string, includeTrustInputs: boolean): string { + aliasManifestSignatureHook?.(filePath) + const loaded = loadAliasManifestFromPath(filePath) + if (!loaded.found) return filePath + ":missing" + if (!loaded.manifest) return filePath + ":invalid" + + const manifestSignature = filePath + ":" + loaded.mtimeMs + ":" + loaded.size + if (!includeTrustInputs) { + return manifestSignature + } + + const root = resolveManagedManifestRoot(filePath) + return [ + manifestSignature, + readSmallFileSignature(resolveVerificationPath(root, filePath)), + readSmallFileSignature(resolveMachineKeyPath()), + ].join(":") +} + +function readSmallFileSignature(filePath: string): string { + try { + const stats = fs.statSync(filePath) + return filePath + ":" + stats.mtimeMs + ":" + stats.size + } catch { + return filePath + ":missing" + } +} + +function mergeNameMaps(primary?: PiNameMaps, secondary?: PiNameMaps): PiNameMaps { + return { + agents: { ...(primary?.agents ?? {}), ...(secondary?.agents ?? {}) }, + skills: { ...(primary?.skills ?? {}), ...(secondary?.skills ?? 
{}) }, + prompts: { ...(primary?.prompts ?? {}), ...(secondary?.prompts ?? {}) }, + } +} + +function getNearestProjectLayersForSection(layers: ResolvedAliasLayer[], sectionName: PiSectionName): { + layers: ResolvedAliasLayer[] + blockedByProject: boolean +} { + let nearestRoot: string | null = null + const candidateLayers: ResolvedAliasLayer[] = [] + + for (const layer of layers) { + if (layer.scope !== "project") continue + + if (!nearestRoot) { + nearestRoot = layer.searchRoot + } + + if (layer.searchRoot !== nearestRoot) break + if (!hasSectionData(layer.manifest[sectionName]) && !hasNameMaps(filterLegacyNameMapsForSection(layer.manifest.nameMaps, sectionName))) { + continue + } + candidateLayers.push(layer) + } + + if (candidateLayers.length === 0) { + return { layers: [], blockedByProject: false } + } + + if (sectionName === "install") { + const nestedVerifiedLayers = candidateLayers.filter((layer) => isNestedProjectManifestPath(layer.manifestPath) && layer.verifiedInstall) + if (nestedVerifiedLayers.length > 0) { + return { layers: nestedVerifiedLayers, blockedByProject: false } + } + + const directVerifiedLayers = candidateLayers.filter((layer) => !isNestedProjectManifestPath(layer.manifestPath) && layer.verifiedInstall) + if (directVerifiedLayers.length > 0) { + return { layers: directVerifiedLayers, blockedByProject: false } + } + + return { layers: [], blockedByProject: true } + } + + const verifiedLayers = candidateLayers.filter((layer) => layer.verifiedSync) + if (verifiedLayers.length > 0) { + return { layers: verifiedLayers, blockedByProject: false } + } + + return { layers: [], blockedByProject: true } +} + +function getEffectiveNameMaps(manifest: PiAliasManifest | null): PiNameMaps | null { + if (!manifest) return null + return mergeNameMaps(mergeNameMaps(manifest.install?.nameMaps, manifest.sync?.nameMaps), manifest.nameMaps) +} + +function getSectionNameMapsWithLegacyFallback(manifest: PiAliasManifest | null, sectionName: PiSectionName): 
PiNameMaps { + const section = manifest?.[sectionName] + if (section) { + return section.nameMaps ?? {} + } + + return filterLegacyNameMapsForSection(manifest?.nameMaps, sectionName) +} + +function filterLegacyNameMapsForSection(nameMaps: PiNameMaps | undefined, sectionName: PiSectionName): PiNameMaps { + const namespace = sectionName === "install" ? "compound-engineering:" : "claude-home:" + return { + agents: filterLegacyNameMapEntries(nameMaps?.agents, namespace), + skills: filterLegacyNameMapEntries(nameMaps?.skills, namespace), + prompts: filterLegacyNameMapEntries(nameMaps?.prompts, namespace), + } +} + +function filterLegacyNameMapEntries(entries: Record | undefined, namespace: string): Record { + const filtered: Record = {} + + for (const [alias, emittedName] of Object.entries(entries ?? {})) { + if (!alias.startsWith(namespace) || !isSafeManagedName(emittedName)) continue + filtered[alias] = emittedName + } + + return filtered +} + +function getNamespaceScopedNameMaps(cwd: string): { + layers: ResolvedAliasLayer[] + install: PiNameMaps[] + sync: PiNameMaps[] + unqualifiedTiers: PiNameMaps[][] + unqualified: PiNameMaps[] +} { + const layers = resolveResolvedAliasLayers(cwd) + const nearestInstall = getNearestProjectLayersForSection(layers, "install") + const nearestSync = getNearestProjectLayersForSection(layers, "sync") + const installLayers = nearestInstall.layers.length > 0 + ? nearestInstall.layers + : nearestInstall.blockedByProject + ? [] + : layers.filter((layer) => layer.scope !== "project" && layer.verifiedInstall) + const syncLayers = nearestSync.layers.length > 0 + ? nearestSync.layers + : nearestSync.blockedByProject + ? [] + : layers.filter((layer) => layer.scope !== "project" && layer.verifiedSync) + return { + layers, + install: installLayers + .map((layer) => getSectionNameMapsWithLegacyFallback(layer.manifest, "install")) + .filter((maps) => Object.keys(maps.agents ?? {}).length || Object.keys(maps.skills ?? 
{}).length || Object.keys(maps.prompts ?? {}).length), + sync: syncLayers + .map((layer) => getSectionNameMapsWithLegacyFallback(layer.manifest, "sync")) + .filter((maps) => Object.keys(maps.agents ?? {}).length || Object.keys(maps.skills ?? {}).length || Object.keys(maps.prompts ?? {}).length), + // Unqualified runtime names are install-facing. Sync aliases remain qualified-only. + unqualifiedTiers: buildSectionAliasTiers(installLayers, "install"), + unqualified: installLayers + .map((layer) => getSectionNameMapsWithLegacyFallback(layer.manifest, "install")) + .filter((maps) => Object.keys(maps.agents ?? {}).length || Object.keys(maps.skills ?? {}).length || Object.keys(maps.prompts ?? {}).length), + } +} + +function getCachedNamespaceScopedNameMaps( + cache: Map>, + cwd: string, +): ReturnType { + const key = canonicalizeManagedPath(cwd) + const cached = cache.get(key) + if (cached) return cached + const resolved = getNamespaceScopedNameMaps(cwd) + cache.set(key, resolved) + return resolved +} + +function listCurrentCapabilities(cwd: string) { + const scoped = getNamespaceScopedNameMaps(cwd) + const mcporter = resolveMcporterConfigInfo(cwd) + const unique = (values: string[]) => [...new Set(values)].sort() + + const flatten = (maps: PiNameMaps[], key: keyof PiNameMaps) => + unique(maps.flatMap((map) => Object.keys(map[key] ?? 
{}))) + + return { + install: { + agents: flatten(scoped.install, "agents"), + skills: flatten(scoped.install, "skills"), + prompts: flatten(scoped.install, "prompts"), + }, + sync: { + agents: flatten(scoped.sync, "agents"), + skills: flatten(scoped.sync, "skills"), + prompts: flatten(scoped.sync, "prompts"), + }, + unqualified: { + agents: flatten(scoped.unqualified, "agents"), + skills: flatten(scoped.unqualified, "skills"), + prompts: flatten(scoped.unqualified, "prompts"), + }, + shared: { + mcporter: { + available: Boolean(mcporter.path), + source: mcporter.source, + servers: mcporter.servers, + provenance: mcporter.provenance, + }, + }, + } +} + +function resolveExactAlias(maps: PiNameMaps[], key: string, type: keyof PiNameMaps): string | undefined { + let match: string | undefined + + for (const map of maps) { + const value = map[type]?.[key] + if (!value) continue + if (!match) { + match = value + continue + } + if (match !== value) { + throw new Error("Conflicting qualified subagent target: " + key) + } + } + + return match +} + +function buildSectionAliasTiers(layers: ResolvedAliasLayer[], sectionName: PiSectionName): PiNameMaps[][] { + const tiers: PiNameMaps[][] = [] + let currentKey: string | null = null + let currentTier: PiNameMaps[] = [] + + for (const layer of layers) { + const maps = getSectionNameMapsWithLegacyFallback(layer.manifest, sectionName) + const hasEntries = Object.keys(maps.agents ?? {}).length || Object.keys(maps.skills ?? {}).length || Object.keys(maps.prompts ?? 
{}).length + if (!hasEntries) continue + + const tierKey = layer.scope + ":" + canonicalizeExecutionPath(layer.searchRoot) + if (tierKey !== currentKey) { + if (currentTier.length > 0) tiers.push(currentTier) + currentKey = tierKey + currentTier = [] + } + currentTier.push(maps) + } + + if (currentTier.length > 0) tiers.push(currentTier) + return tiers +} + +function resolveUnqualifiedAlias(tiers: PiNameMaps[][], key: string, type: keyof PiNameMaps): string | undefined { + for (const tier of tiers) { + const match = resolveExactAlias(tier, key, type) + if (match) return match + } + return undefined +} + +function resolveAgentName(cwd: string, value: string, scopedMaps = getNamespaceScopedNameMaps(cwd)): string { + const trimmed = String(value || "").trim() + if (!trimmed) return "" + + const namespace = trimmed.split(":").filter(Boolean)[0] ?? "" + const qualified = trimmed.includes(":") + + if (qualified && namespace === "compound-engineering") { + const exactAgent = resolveExactAlias(scopedMaps.install, trimmed, "agents") + if (exactAgent) return exactAgent + + const exactSkill = resolveExactAlias(scopedMaps.install, trimmed, "skills") + if (exactSkill) return exactSkill + + throw new Error("Unknown qualified subagent target: " + trimmed) + } + + if (qualified && namespace === "claude-home") { + const exactAgent = resolveExactAlias(scopedMaps.sync, trimmed, "agents") + if (exactAgent) return exactAgent + + const exactSkill = resolveExactAlias(scopedMaps.sync, trimmed, "skills") + if (exactSkill) return exactSkill + + throw new Error("Unknown qualified subagent target: " + trimmed) + } + + const exactAgent = resolveUnqualifiedAlias(scopedMaps.unqualifiedTiers, trimmed, "agents") + if (exactAgent) return exactAgent + + const exactSkill = resolveUnqualifiedAlias(scopedMaps.unqualifiedTiers, trimmed, "skills") + if (exactSkill) return exactSkill + + if (trimmed.includes(":")) { + throw new Error("Unknown qualified subagent target: " + trimmed) + } + + throw new 
Error("Unknown subagent target: " + trimmed) +} + +function resolvePromptName(cwd: string, value: string, scopedMaps = getNamespaceScopedNameMaps(cwd)): string { + const trimmed = String(value || "").trim() + if (!trimmed) return "" + + const namespace = trimmed.split(":").filter(Boolean)[0] ?? "" + const qualified = trimmed.includes(":") + + if (qualified && namespace === "compound-engineering") { + const exactPrompt = resolveExactAlias(scopedMaps.install, trimmed, "prompts") + if (exactPrompt) return exactPrompt + throw new Error("Unknown qualified prompt target: " + trimmed) + } + + if (qualified && namespace === "claude-home") { + const exactPrompt = resolveExactAlias(scopedMaps.sync, trimmed, "prompts") + if (exactPrompt) return exactPrompt + throw new Error("Unknown qualified prompt target: " + trimmed) + } + + const exactPrompt = resolveUnqualifiedAlias(scopedMaps.unqualifiedTiers, trimmed, "prompts") + if (exactPrompt) return exactPrompt + + if (trimmed.includes(":")) { + throw new Error("Unknown qualified prompt target: " + trimmed) + } + + throw new Error("Unknown prompt target: " + trimmed) +} + +function resolveMcporterConfigInfo(cwd: string): { + path?: string + source: McporterAuthoritySource | null + servers: string[] + provenance: McporterCapabilityProvenance +} { + for (const current of walkUpPaths(cwd)) { + const syncConfigPath = path.join(current, "compound-engineering", "mcporter.json") + const syncManifestPath = path.join(current, "compound-engineering", "compound-engineering-managed.json") + if (fs.existsSync(syncConfigPath)) { + if (!isTrustedMcporterConfigOwner(syncManifestPath, "sync")) { + return { + source: null, + servers: [], + provenance: { status: "blocked-unverified-project-sync", authority: null }, + } + } + return { + path: syncConfigPath, + source: "project-sync", + servers: readMcporterServerNames(syncConfigPath), + provenance: { status: "available", authority: "project-sync" }, + } + } + + const installConfigPath = 
path.join(current, ".pi", "compound-engineering", "mcporter.json") + const installManifestPath = path.join(current, ".pi", "compound-engineering", "compound-engineering-managed.json") + if (fs.existsSync(installConfigPath)) { + if (!isTrustedMcporterConfigOwner(installManifestPath, "install")) { + return { + source: null, + servers: [], + provenance: { status: "blocked-unverified-project-install", authority: null }, + } + } + return { + path: installConfigPath, + source: "project-install", + servers: readMcporterServerNames(installConfigPath), + provenance: { status: "available", authority: "project-install" }, + } + } + } + + const globalPath = path.join(resolveUserHome(), ".pi", "agent", "compound-engineering", "mcporter.json") + const globalManifestPath = path.join(resolveUserHome(), ".pi", "agent", "compound-engineering", "compound-engineering-managed.json") + if (fs.existsSync(globalPath) && (isTrustedMcporterConfigOwner(globalManifestPath, "install") || isTrustedMcporterConfigOwner(globalManifestPath, "sync"))) { + return { + path: globalPath, + source: "global", + servers: readMcporterServerNames(globalPath), + provenance: { status: "available", authority: "global" }, + } + } + + const bundled = resolveBundledMcporterConfigInfo() + if (bundled.path) { + return { + path: bundled.path, + source: "bundled", + servers: bundled.servers, + provenance: { status: "available", authority: "bundled" }, + } + } + if (bundled.status === "bundled-unverified") { + return { + source: null, + servers: [], + provenance: { status: "bundled-unverified", authority: null }, + } + } + + return { + source: null, + servers: [], + provenance: { status: "absent", authority: null }, + } +} + +function readMcporterServerNames(configPath: string): string[] { + try { + const parsed = JSON.parse(fs.readFileSync(configPath, "utf8")) as { mcpServers?: Record } + return Object.keys(parsed.mcpServers ?? 
{}).sort() + } catch { + return [] + } +} + +function resolveBundledMcporterConfigPath(): string | undefined { + return resolveBundledMcporterConfigInfo().path +} + +function resolveBundledMcporterConfigInfo(): { + path?: string + status: "available" | "bundled-unverified" | "absent" + servers: string[] +} { + const bundledManifestPath = resolveBundledAliasManifestPath() + let bundledConfigPath: string | undefined + + try { + const extensionDir = path.dirname(fileURLToPath(import.meta.url)) + const candidates = [ + path.join(extensionDir, "..", "pi-resources", "compound-engineering", "mcporter.json"), + ] + + for (const candidate of candidates) { + if (fs.existsSync(candidate)) { + bundledConfigPath = candidate + break + } + } + } catch { + // noop: bundled path is best-effort fallback + } + + if (!bundledManifestPath && !bundledConfigPath) { + return { status: "absent", servers: [] } + } + if (!bundledManifestPath || !bundledConfigPath) { + return { status: "bundled-unverified", servers: [] } + } + + const loaded = loadAliasManifestFromPath(bundledManifestPath) + if (!loaded.manifest) { + return { status: "bundled-unverified", servers: [] } + } + + if (!normalizeSharedResources(loaded.manifest.install?.sharedResources).mcporterConfig) { + return { status: "bundled-unverified", servers: [] } + } + if (loaded.manifest.policyFingerprint !== getCurrentPolicyFingerprint()) { + return { status: "bundled-unverified", servers: [] } + } + + return { + path: bundledConfigPath, + status: "available", + servers: readMcporterServerNames(bundledConfigPath), + } +} + function resolveMcporterConfigPath(cwd: string, explicit?: string): string | undefined { if (explicit && explicit.trim()) { - return path.resolve(explicit) + console.warn("Warning: mcporter configPath is deprecated and ignored; Compound Engineering will resolve the verified MCPorter config automatically.") } - const projectPath = path.join(cwd, ".pi", "compound-engineering", "mcporter.json") - if 
(fs.existsSync(projectPath)) return projectPath + return resolveMcporterConfigInfo(cwd).path +} + +function isTrustedMcporterConfigOwner(manifestPath: string, sectionName: PiSectionName): boolean { + const loaded = loadAliasManifestFromPath(manifestPath) + if (!loaded.manifest) return false - const globalPath = path.join(os.homedir(), ".pi", "agent", "compound-engineering", "mcporter.json") - if (fs.existsSync(globalPath)) return globalPath + if (!normalizeSharedResources(loaded.manifest[sectionName]?.sharedResources).mcporterConfig) { + return false + } - return resolveBundledMcporterConfigPath() + return isVerifiedManifestSection(manifestPath, loaded.manifest, sectionName) } -function resolveTaskCwd(baseCwd: string, taskCwd?: string): string { +function resolveTaskCwd( + baseCwd: string, + taskCwd?: string, + scopedMaps = getNamespaceScopedNameMaps(baseCwd), +): string { if (!taskCwd || !taskCwd.trim()) return baseCwd - const expanded = taskCwd === "~" - ? os.homedir() - : taskCwd.startsWith("~" + path.sep) - ? path.join(os.homedir(), taskCwd.slice(2)) - : taskCwd - return path.resolve(baseCwd, expanded) + const trimmed = taskCwd.trim() + if (trimmed === "~" || trimmed.startsWith("~" + path.sep)) { + throw new Error("ce_subagent cwd is outside the active workspace") + } + + const workspaceRoot = resolveWorkspaceRoot(baseCwd, scopedMaps) + const candidate = canonicalizeExecutionPath(path.isAbsolute(trimmed) ? 
trimmed : path.resolve(baseCwd, trimmed)) + if (candidate !== workspaceRoot && !candidate.startsWith(workspaceRoot + path.sep)) { + throw new Error("ce_subagent cwd is outside the active workspace") + } + + return candidate } -async function runSingleSubagent( - pi: ExtensionAPI, +function resolveWorkspaceRoot(cwd: string, scopedMaps = getNamespaceScopedNameMaps(cwd)): string { + const resolvedCwd = canonicalizeExecutionPath(cwd) + const projectLayers = scopedMaps.layers.filter((layer) => layer.scope === "project") + if (projectLayers.length === 0) { + return findWorkspaceRootFromFilesystem(resolvedCwd) + } + + const nearestRoot = canonicalizeExecutionPath(projectLayers[0]!.searchRoot) + const hasAuthoritativeLayer = projectLayers.some((layer) => + canonicalizeExecutionPath(layer.searchRoot) === nearestRoot && (layer.verifiedInstall || layer.verifiedSync)) + + return hasAuthoritativeLayer ? nearestRoot : findWorkspaceRootFromFilesystem(resolvedCwd) +} + +function findWorkspaceRootFromFilesystem(cwd: string): string { + for (const candidate of walkUpPaths(cwd)) { + if (hasWorkspaceMarker(candidate)) { + return candidate + } + } + + return cwd +} + +function hasWorkspaceMarker(candidate: string): boolean { + return [".git", "package.json", "bunfig.toml", "tsconfig.json"].some((entry) => + fs.existsSync(path.join(candidate, entry))) +} + +type PreparedSubagentTask = { + agent: string + taskText: string + cwd: string +} + +function prepareSubagentTaskWithCache( baseCwd: string, task: SubagentTask, - signal?: AbortSignal, - timeoutMs = DEFAULT_SUBAGENT_TIMEOUT_MS, -): Promise { - const agent = normalizeName(task.agent) + scopedMapCache: Map>, +): PreparedSubagentTask { + const baseScopedMaps = getCachedNamespaceScopedNameMaps(scopedMapCache, baseCwd) + const cwd = resolveTaskCwd(baseCwd, task.cwd, baseScopedMaps) + const agent = resolveAgentName(cwd, task.agent, getCachedNamespaceScopedNameMaps(scopedMapCache, cwd)) if (!agent) { throw new Error("Subagent task is 
missing a valid agent name") } - const taskText = String(task.task ?? "").trim() - if (!taskText) { - throw new Error("Subagent task for " + agent + " is empty") + return { + agent, + cwd, + taskText: String(task.task ?? "").trim(), } +} + +function walkUpPaths(start: string): string[] { + const paths: string[] = [] + let current = canonicalizeManagedPath(start) - const cwd = resolveTaskCwd(baseCwd, task.cwd) - const prompt = "/skill:" + agent + " " + taskText + while (true) { + paths.push(current) + const parent = path.dirname(current) + if (parent === current) break + current = parent + } + + return paths +} + +async function runSingleSubagent( + pi: ExtensionAPI, + prepared: PreparedSubagentTask, + signal?: AbortSignal, + timeoutMs = DEFAULT_SUBAGENT_TIMEOUT_MS, +): Promise { + const { cwd, agent, taskText } = prepared + const prompt = taskText ? "/skill:" + agent + " " + taskText : "/skill:" + agent const script = "cd " + shellEscape(cwd) + " && pi --no-session -p " + shellEscape(prompt) const result = await pi.exec("bash", ["-lc", script], { signal, timeout: timeoutMs }) @@ -120,8 +1124,7 @@ async function runSingleSubagent( async function runParallelSubagents( pi: ExtensionAPI, - baseCwd: string, - tasks: SubagentTask[], + tasks: PreparedSubagentTask[], signal?: AbortSignal, timeoutMs = DEFAULT_SUBAGENT_TIMEOUT_MS, maxConcurrency = 4, @@ -139,7 +1142,7 @@ async function runParallelSubagents( nextIndex += 1 if (current >= tasks.length) return - results[current] = await runSingleSubagent(pi, baseCwd, tasks[current], signal, timeoutMs) + results[current] = await runSingleSubagent(pi, tasks[current], signal, timeoutMs) completed += 1 onProgress?.(completed, tasks.length) } @@ -238,16 +1241,78 @@ export default function (pi: ExtensionAPI) { }, }) + pi.registerTool({ + name: "ce_list_capabilities", + label: "Compound Engineering Capabilities", + description: "List the current verified Pi capabilities available in the active workspace.", + parameters: 
Type.Object({}), + async execute(_toolCallId, _params, _signal, _onUpdate, ctx) { + const capabilities = listCurrentCapabilities(ctx.cwd) + return { + content: [{ type: "text", text: JSON.stringify(capabilities, null, 2) }], + details: capabilities, + } + }, + }) + + pi.registerTool({ + name: "ce_run_prompt", + label: "Compound Engineering Prompt", + description: "Run a verified Pi prompt by alias in the active workspace.", + parameters: Type.Object({ + prompt: Type.String({ description: "Prompt name or qualified alias to invoke" }), + args: Type.Optional(Type.String({ description: "Optional prompt arguments appended after the prompt name" })), + cwd: Type.Optional(Type.String({ description: "Optional working directory for this prompt run" })), + timeoutMs: Type.Optional(Type.Number({ default: DEFAULT_SUBAGENT_TIMEOUT_MS })), + }), + async execute(_toolCallId, params, signal, _onUpdate, ctx) { + try { + const scopedMapCache = new Map>() + const baseScopedMaps = getCachedNamespaceScopedNameMaps(scopedMapCache, ctx.cwd) + const cwd = resolveTaskCwd(ctx.cwd, params.cwd, baseScopedMaps) + const scopedMaps = getCachedNamespaceScopedNameMaps(scopedMapCache, cwd) + const prompt = resolvePromptName(cwd, params.prompt, scopedMaps) + if (!prompt) { + throw new Error("Prompt execution requires a valid prompt name") + } + + const promptArgs = String(params.args ?? "").trim() + const promptCommand = promptArgs ? 
"/" + prompt + " " + promptArgs : "/" + prompt + const script = "cd " + shellEscape(cwd) + " && pi --no-session -p " + shellEscape(promptCommand) + const timeoutMs = Number(params.timeoutMs || DEFAULT_SUBAGENT_TIMEOUT_MS) + const result = await pi.exec("bash", ["-lc", script], { signal, timeout: timeoutMs }) + const output = truncate(result.stdout || result.stderr || "") + + return { + isError: result.code !== 0, + content: [{ type: "text", text: output || "(no output)" }], + details: { + exitCode: result.code, + prompt, + cwd, + command: promptCommand, + }, + } + } catch (error) { + return { + isError: true, + content: [{ type: "text", text: error instanceof Error ? error.message : String(error) }], + details: {}, + } + } + }, + }) + const subagentTaskSchema = Type.Object({ agent: Type.String({ description: "Skill/agent name to invoke" }), - task: Type.String({ description: "Task instructions for that skill" }), + task: Type.Optional(Type.String({ description: "Task instructions for that skill" })), cwd: Type.Optional(Type.String({ description: "Optional working directory for this task" })), }) pi.registerTool({ - name: "subagent", - label: "Subagent", - description: "Run one or more skill-based subagent tasks. Supports single, parallel, and chained execution.", + name: "ce_subagent", + label: "Compound Engineering Subagent", + description: "Run one or more Compound Engineering skill-based subagent tasks. 
Supports single, parallel, and chained execution.", parameters: Type.Object({ agent: Type.Optional(Type.String({ description: "Single subagent name" })), task: Type.Optional(Type.String({ description: "Single subagent task" })), @@ -258,27 +1323,27 @@ export default function (pi: ExtensionAPI) { timeoutMs: Type.Optional(Type.Number({ default: DEFAULT_SUBAGENT_TIMEOUT_MS })), }), async execute(_toolCallId, params, signal, onUpdate, ctx) { - const hasSingle = Boolean(params.agent && params.task) const hasTasks = Boolean(params.tasks && params.tasks.length > 0) const hasChain = Boolean(params.chain && params.chain.length > 0) + const hasSingle = Boolean(params.agent) const modeCount = Number(hasSingle) + Number(hasTasks) + Number(hasChain) if (modeCount !== 1) { return { isError: true, - content: [{ type: "text", text: "Provide exactly one mode: single (agent+task), tasks, or chain." }], + content: [{ type: "text", text: "Provide exactly one mode: single (agent with optional task), tasks, or chain." 
}], details: {}, } } const timeoutMs = Number(params.timeoutMs || DEFAULT_SUBAGENT_TIMEOUT_MS) + const scopedMapCache = new Map>() try { if (hasSingle) { const result = await runSingleSubagent( pi, - ctx.cwd, - { agent: params.agent!, task: params.task!, cwd: params.cwd }, + prepareSubagentTaskWithCache(ctx.cwd, { agent: params.agent!, task: params.task!, cwd: params.cwd }, scopedMapCache), signal, timeoutMs, ) @@ -292,12 +1357,11 @@ export default function (pi: ExtensionAPI) { } if (hasTasks) { - const tasks = params.tasks as SubagentTask[] + const tasks = (params.tasks as SubagentTask[]).map((task) => prepareSubagentTaskWithCache(ctx.cwd, task, scopedMapCache)) const maxConcurrency = Number(params.maxConcurrency || 4) const results = await runParallelSubagents( pi, - ctx.cwd, tasks, signal, timeoutMs, @@ -321,15 +1385,17 @@ export default function (pi: ExtensionAPI) { } const chain = params.chain as SubagentTask[] + const preparedChain = chain.map((step) => prepareSubagentTaskWithCache(ctx.cwd, step, scopedMapCache)) const results: SubagentResult[] = [] let previous = "" - for (const step of chain) { - const resolvedTask = step.task.replace(/\\{previous\\}/g, previous) + for (let i = 0; i < chain.length; i += 1) { + const step = chain[i]! + const prepared = preparedChain[i]! + const resolvedTask = String(step.task ?? 
"").replace(/\\{previous\\}/g, previous) const result = await runSingleSubagent( pi, - ctx.cwd, - { agent: step.agent, task: resolvedTask, cwd: step.cwd }, + { ...prepared, taskText: resolvedTask.trim() }, signal, timeoutMs, ) @@ -370,7 +1436,6 @@ export default function (pi: ExtensionAPI) { server: Type.String({ description: "Configured MCP server name" }), allParameters: Type.Optional(Type.Boolean({ default: false })), json: Type.Optional(Type.Boolean({ default: true })), - configPath: Type.Optional(Type.String({ description: "Optional mcporter config path" })), }), async execute(_toolCallId, params, signal, _onUpdate, ctx) { const args = ["list", params.server] @@ -406,7 +1471,6 @@ export default function (pi: ExtensionAPI) { server: Type.Optional(Type.String({ description: "Server name (if call is omitted)" })), tool: Type.Optional(Type.String({ description: "Tool name (if call is omitted)" })), args: Type.Optional(Type.Record(Type.String(), Type.Any(), { description: "JSON arguments object" })), - configPath: Type.Optional(Type.String({ description: "Optional mcporter config path" })), }), async execute(_toolCallId, params, signal, _onUpdate, ctx) { const args = ["call"] diff --git a/src/types/claude.ts b/src/types/claude.ts index 9e00f7fdd..593f12a03 100644 --- a/src/types/claude.ts +++ b/src/types/claude.ts @@ -49,6 +49,9 @@ export type ClaudeSkill = { description?: string argumentHint?: string disableModelInvocation?: boolean + entryDir?: string + trustedRoot?: string + trustedBoundary?: string sourceDir: string skillPath: string } diff --git a/src/types/js-yaml.d.ts b/src/types/js-yaml.d.ts new file mode 100644 index 000000000..256f76a3b --- /dev/null +++ b/src/types/js-yaml.d.ts @@ -0,0 +1,4 @@ +declare module "js-yaml" { + export function load(input: string): unknown + export function dump(input: unknown, options?: unknown): string +} diff --git a/src/types/pi.ts b/src/types/pi.ts index 96df7849b..dd9884e98 100644 --- a/src/types/pi.ts +++ 
b/src/types/pi.ts @@ -37,4 +37,5 @@ export type PiBundle = { generatedSkills: PiGeneratedSkill[] extensions: PiExtensionFile[] mcporterConfig?: PiMcporterConfig + nameMaps?: import("../utils/pi-skills").PiNameMaps } diff --git a/src/utils/files.ts b/src/utils/files.ts index ad35c9923..05f1cf222 100644 --- a/src/utils/files.ts +++ b/src/utils/files.ts @@ -1,13 +1,68 @@ import { promises as fs } from "fs" import path from "path" +export type AtomicWriteFailureStage = "beforeWrite" | "beforeRename" + +type AtomicWriteFailureHook = (filePath: string, stage: AtomicWriteFailureStage) => void | Promise +type ManagedPathSnapshotHook = (targetPath: string) => void | Promise + +export type TextFileSnapshot = { + filePath: string + existed: boolean + content?: string + mode?: number +} + +export type ManagedPathSnapshot = + | { + targetPath: string + existed: false + } + | { + targetPath: string + existed: true + kind: "symlink" + linkTarget: string + } + | { + targetPath: string + existed: true + kind: "file" + tempPath: string + mode?: number + } + | { + targetPath: string + existed: true + kind: "directory" + tempPath: string + mode?: number + } + +let atomicWriteFailureHook: AtomicWriteFailureHook | null = null +let managedPathSnapshotHook: ManagedPathSnapshotHook | null = null + +export function setAtomicWriteFailureHookForTests(hook: AtomicWriteFailureHook | null): void { + atomicWriteFailureHook = hook +} + +export function setManagedPathSnapshotHookForTests(hook: ManagedPathSnapshotHook | null): void { + managedPathSnapshotHook = hook +} + export async function backupFile(filePath: string): Promise { if (!(await pathExists(filePath))) return null try { const timestamp = new Date().toISOString().replace(/[:.]/g, "-") const backupPath = `${filePath}.bak.${timestamp}` + const stats = await fs.lstat(filePath) + if (!stats.isFile() || stats.isSymbolicLink()) { + throw new Error(`Refusing to back up unexpected target ${filePath}`) + } + await 
assertNoSymlinkAncestors(backupPath) await fs.copyFile(filePath, backupPath) + await fs.chmod(backupPath, stats.mode & 0o777) return backupPath } catch { return null @@ -27,6 +82,50 @@ export async function ensureDir(dirPath: string): Promise { await fs.mkdir(dirPath, { recursive: true }) } +export async function ensureManagedDir(dirPath: string): Promise { + const resolved = path.resolve(dirPath) + const root = path.parse(resolved).root + const relative = path.relative(root, resolved) + if (!relative) return + + let current = root + for (const segment of relative.split(path.sep).filter(Boolean)) { + current = path.join(current, segment) + + const stats = await fs.lstat(current).catch(() => null) + if (stats) { + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to mutate through symlinked ancestor ${current}`) + } + if (!stats.isDirectory()) { + throw new Error(`Refusing to mutate through non-directory ancestor ${current}`) + } + continue + } + + try { + await fs.mkdir(current) + } catch (error) { + if ((error as NodeJS.ErrnoException).code !== "EEXIST") { + throw error + } + } + const rechecked = await fs.lstat(current) + if (rechecked.isSymbolicLink()) { + throw new Error(`Refusing to mutate through symlinked ancestor ${current}`) + } + if (!rechecked.isDirectory()) { + throw new Error(`Refusing to mutate through non-directory ancestor ${current}`) + } + } +} + +export async function ensureManagedParentDir(filePath: string): Promise { + await assertNoSymlinkAncestors(filePath) + await ensureManagedDir(path.dirname(filePath)) + await assertNoSymlinkAncestors(filePath) +} + export async function readText(filePath: string): Promise { return fs.readFile(filePath, "utf8") } @@ -37,21 +136,73 @@ export async function readJson(filePath: string): Promise { } export async function writeText(filePath: string, content: string): Promise { - await ensureDir(path.dirname(filePath)) + await ensureManagedParentDir(filePath) await fs.writeFile(filePath, content, "utf8") 
} +export async function writeTextIfChanged( + filePath: string, + content: string, + options?: { existingContent?: string | null }, +): Promise { + return writeTextAtomicIfChanged({ filePath, content, existingContent: options?.existingContent }) +} + export async function writeTextSecure(filePath: string, content: string): Promise { await ensureDir(path.dirname(filePath)) await fs.writeFile(filePath, content, { encoding: "utf8", mode: 0o600 }) await fs.chmod(filePath, 0o600) } +export async function writeTextSecureIfChanged(filePath: string, content: string): Promise { + return writeTextAtomicIfChanged({ filePath, content, mode: 0o600 }) +} + export async function writeJson(filePath: string, data: unknown): Promise { const content = JSON.stringify(data, null, 2) await writeText(filePath, content + "\n") } +export async function writeJsonIfChanged(filePath: string, data: unknown): Promise { + return writeJsonAtomicIfChanged({ filePath, data }) +} + +export async function removeDirIfExists(dirPath: string): Promise { + try { + await fs.rm(dirPath, { recursive: true, force: true }) + } catch (error) { + if ((error as NodeJS.ErrnoException).code !== "ENOENT") { + throw error + } + } +} + +export async function removeFileIfExists(filePath: string): Promise { + try { + await assertNoSymlinkAncestors(filePath) + const stats = await fs.lstat(filePath) + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to remove symlink target ${filePath}`) + } + if (!stats.isFile()) { + throw new Error(`Refusing to remove unexpected target ${filePath}`) + } + await assertNoSymlinkAncestors(filePath) + const rechecked = await fs.lstat(filePath) + if (rechecked.isSymbolicLink()) { + throw new Error(`Refusing to remove symlink target ${filePath}`) + } + if (!rechecked.isFile()) { + throw new Error(`Refusing to remove unexpected target ${filePath}`) + } + await fs.unlink(filePath) + } catch (error) { + if ((error as NodeJS.ErrnoException).code !== "ENOENT") { + throw error + } + } +} 
+ /** Write JSON with restrictive permissions (0o600) for files containing secrets */ export async function writeJsonSecure(filePath: string, data: unknown): Promise { const content = JSON.stringify(data, null, 2) @@ -60,6 +211,350 @@ export async function writeJsonSecure(filePath: string, data: unknown): Promise< await fs.chmod(filePath, 0o600) } +export async function writeJsonSecureIfChanged(filePath: string, data: unknown): Promise { + return writeJsonAtomicIfChanged({ filePath, data, mode: 0o600 }) +} + +export async function assertNoSymlinkTarget(filePath: string): Promise { + try { + const stats = await fs.lstat(filePath) + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to write through symlink target ${filePath}`) + } + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") { + return + } + throw error + } +} + +export async function assertNoSymlinkAncestors(targetPath: string): Promise { + const resolvedTarget = path.resolve(targetPath) + const ancestors: string[] = [] + let current = path.dirname(resolvedTarget) + + while (true) { + ancestors.push(current) + const parent = path.dirname(current) + if (parent === current) break + current = parent + } + + for (const ancestor of ancestors.reverse()) { + try { + const stats = await fs.lstat(ancestor) + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to mutate through symlinked ancestor ${ancestor}`) + } + if (!stats.isDirectory()) { + throw new Error(`Refusing to mutate through non-directory ancestor ${ancestor}`) + } + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") { + continue + } + throw error + } + } +} + +export async function writeTextAtomicIfChanged(options: { + filePath: string + content: string + mode?: number + skipFailureHook?: boolean + existingContent?: string | null +}): Promise { + const { filePath, content, mode, skipFailureHook = false, existingContent } = options + await assertNoSymlinkAncestors(filePath) + await 
assertNoSymlinkTarget(filePath) + const existing = existingContent === undefined ? await readText(filePath).catch(() => null) : existingContent + if (existing === content) { + return false + } + + await ensureManagedParentDir(filePath) + await assertNoSymlinkTarget(filePath) + await maybeFailAtomicWrite(filePath, "beforeWrite", skipFailureHook) + + const tempPath = path.join( + path.dirname(filePath), + `.${path.basename(filePath)}.tmp-${process.pid}-${Date.now()}-${Math.random().toString(16).slice(2)}`, + ) + + try { + await fs.writeFile(tempPath, content, { + encoding: "utf8", + mode: mode ?? 0o644, + }) + if (mode !== undefined) { + await fs.chmod(tempPath, mode) + } + await maybeFailAtomicWrite(filePath, "beforeRename", skipFailureHook) + await assertNoSymlinkAncestors(filePath) + await assertNoSymlinkTarget(filePath) + await fs.rename(tempPath, filePath) + } catch (error) { + await fs.unlink(tempPath).catch(() => undefined) + throw error + } + + return true +} + +export async function writeFileAtomicIfChanged(options: { + filePath: string + content: Buffer + mode?: number + skipFailureHook?: boolean +}): Promise { + const { filePath, content, mode, skipFailureHook = false } = options + await assertNoSymlinkAncestors(filePath) + await assertNoSymlinkTarget(filePath) + const existing = await fs.readFile(filePath).catch(() => null) + if (existing && existing.equals(content)) { + return false + } + + await ensureManagedParentDir(filePath) + await assertNoSymlinkTarget(filePath) + await maybeFailAtomicWrite(filePath, "beforeWrite", skipFailureHook) + + const tempPath = path.join( + path.dirname(filePath), + `.${path.basename(filePath)}.tmp-${process.pid}-${Date.now()}-${Math.random().toString(16).slice(2)}`, + ) + + try { + await fs.writeFile(tempPath, content, { mode: mode ?? 
0o644 }) + if (mode !== undefined) { + await fs.chmod(tempPath, mode) + } + await maybeFailAtomicWrite(filePath, "beforeRename", skipFailureHook) + await assertNoSymlinkAncestors(filePath) + await assertNoSymlinkTarget(filePath) + await fs.rename(tempPath, filePath) + } catch (error) { + await fs.unlink(tempPath).catch(() => undefined) + throw error + } + + return true +} + +export async function writeJsonAtomicIfChanged(options: { + filePath: string + data: unknown + mode?: number + skipFailureHook?: boolean +}): Promise { + return writeTextAtomicIfChanged({ + filePath: options.filePath, + content: JSON.stringify(options.data, null, 2) + "\n", + mode: options.mode, + skipFailureHook: options.skipFailureHook, + }) +} + +export async function captureTextFileSnapshot(filePath: string): Promise { + await assertNoSymlinkAncestors(filePath) + try { + const stats = await fs.lstat(filePath) + if (!stats.isFile()) { + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to snapshot symlink target ${filePath}`) + } + throw new Error(`Refusing to snapshot non-file target ${filePath}`) + } + + return { + filePath, + existed: true, + content: await fs.readFile(filePath, "utf8"), + mode: stats.mode & 0o777, + } + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") { + return { filePath, existed: false } + } + throw error + } +} + +export async function restoreTextFileSnapshot(snapshot: TextFileSnapshot): Promise { + if (!snapshot.existed) { + await removeManagedFileIfExists(snapshot.filePath) + return + } + + await writeTextAtomicIfChanged({ + filePath: snapshot.filePath, + content: snapshot.content ?? 
"", + mode: snapshot.mode, + skipFailureHook: true, + }) +} + +export async function captureManagedPathSnapshot(targetPath: string, snapshotRoot: string): Promise { + if (managedPathSnapshotHook) { + await managedPathSnapshotHook(targetPath) + } + await assertNoSymlinkAncestors(targetPath) + const stats = await fs.lstat(targetPath).catch(() => null) + if (!stats) { + return { targetPath, existed: false } + } + + if (stats.isSymbolicLink()) { + return { + targetPath, + existed: true, + kind: "symlink", + linkTarget: await fs.readlink(targetPath), + } + } + + const tempPath = path.join(snapshotRoot, `${Date.now()}-${Math.random().toString(16).slice(2)}`) + + if (stats.isDirectory()) { + await copyManagedSnapshotDirectory(targetPath, tempPath) + return { + targetPath, + existed: true, + kind: "directory", + tempPath, + mode: stats.mode & 0o777, + } + } + + if (!stats.isFile()) { + throw new Error(`Refusing to snapshot non-file target ${targetPath}`) + } + + await ensureDir(path.dirname(tempPath)) + await fs.copyFile(targetPath, tempPath) + return { + targetPath, + existed: true, + kind: "file", + tempPath, + mode: stats.mode & 0o777, + } +} + +export async function restoreManagedPathSnapshot(snapshot: ManagedPathSnapshot): Promise { + if (!snapshot.existed) { + await removeManagedPathIfExists(snapshot.targetPath) + return + } + + await assertNoSymlinkAncestors(snapshot.targetPath) + await removeManagedPathIfExists(snapshot.targetPath) + + if (snapshot.kind === "symlink") { + await assertNoSymlinkAncestors(snapshot.targetPath) + await ensureDir(path.dirname(snapshot.targetPath)) + await fs.symlink(snapshot.linkTarget, snapshot.targetPath) + return + } + + if (snapshot.kind === "directory") { + await copyManagedSnapshotDirectory(snapshot.tempPath, snapshot.targetPath) + if (snapshot.mode !== undefined) { + await fs.chmod(snapshot.targetPath, snapshot.mode) + } + return + } + + await assertNoSymlinkAncestors(snapshot.targetPath) + await 
ensureDir(path.dirname(snapshot.targetPath)) + await fs.copyFile(snapshot.tempPath, snapshot.targetPath) + if (snapshot.mode !== undefined) { + await fs.chmod(snapshot.targetPath, snapshot.mode) + } +} + +export async function removeManagedPathIfExists(targetPath: string): Promise { + await assertNoSymlinkAncestors(targetPath) + const stats = await fs.lstat(targetPath).catch(() => null) + if (!stats) return + if (stats.isSymbolicLink()) { + await assertNoSymlinkAncestors(targetPath) + const rechecked = await fs.lstat(targetPath) + if (!rechecked.isSymbolicLink()) { + throw new Error(`Refusing to remove unexpected target ${targetPath}`) + } + await fs.unlink(targetPath) + return + } + if (stats.isDirectory()) { + await assertNoSymlinkAncestors(targetPath) + const rechecked = await fs.lstat(targetPath) + if (!rechecked.isDirectory() || rechecked.isSymbolicLink()) { + throw new Error(`Refusing to remove unexpected target ${targetPath}`) + } + await fs.rm(targetPath, { recursive: true, force: true }) + return + } + if (stats.isFile()) { + await assertNoSymlinkAncestors(targetPath) + const rechecked = await fs.lstat(targetPath) + if (!rechecked.isFile() || rechecked.isSymbolicLink()) { + throw new Error(`Refusing to remove unexpected target ${targetPath}`) + } + await fs.unlink(targetPath) + return + } + throw new Error(`Refusing to remove unexpected target ${targetPath}`) +} + +export async function removeManagedFileIfExists(filePath: string): Promise { + try { + await assertNoSymlinkAncestors(filePath) + const stats = await fs.lstat(filePath) + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to remove symlink target ${filePath}`) + } + await fs.unlink(filePath) + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") { + return + } + throw error + } +} + +async function copyManagedSnapshotDirectory(sourceDir: string, targetDir: string): Promise { + await assertNoSymlinkAncestors(targetDir) + await ensureDir(targetDir) + const entries = 
await fs.readdir(sourceDir, { withFileTypes: true }) + + for (const entry of entries) { + const sourcePath = path.join(sourceDir, entry.name) + const targetPath = path.join(targetDir, entry.name) + + if (entry.isDirectory()) { + await copyManagedSnapshotDirectory(sourcePath, targetPath) + continue + } + + if (entry.isFile()) { + await assertNoSymlinkAncestors(targetPath) + await ensureDir(path.dirname(targetPath)) + await fs.copyFile(sourcePath, targetPath) + continue + } + + throw new Error(`Refusing to snapshot unexpected target ${sourcePath}`) + } +} + +async function maybeFailAtomicWrite(filePath: string, stage: AtomicWriteFailureStage, skipFailureHook: boolean): Promise { + if (skipFailureHook || !atomicWriteFailureHook) return + await atomicWriteFailureHook(filePath, stage) +} + export async function walkFiles(root: string): Promise { const entries = await fs.readdir(root, { withFileTypes: true }) const results: string[] = [] @@ -85,6 +580,41 @@ export function sanitizePathName(name: string): string { return name.replace(/:/g, "-") } +export function isSafePathComponent(name: string): boolean { + if (!name || name.length === 0) return false + if (name === "." 
|| name === "..") return false + if (name.includes("\0")) return false + if (name.includes("/") || name.includes("\\")) return false + if (name.includes("..")) return false + return true +} + +export function assertSafePathComponent(name: string, label = "path component"): string { + const value = String(name || "").trim() + if (!isSafePathComponent(value)) { + throw new Error(`Unsafe ${label}: ${name}`) + } + return value +} + +export function sanitizeSafePathName(name: string, label = "path component"): string { + const safeName = assertSafePathComponent(name, label) + const sanitized = sanitizePathName(safeName) + if (!isSafePathComponent(sanitized)) { + throw new Error(`Unsafe ${label}: ${name}`) + } + return sanitized +} + +export function assertPathWithinRoot(targetPath: string, root: string, label = "path"): string { + const resolvedRoot = path.resolve(root) + const resolvedTarget = path.resolve(targetPath) + if (resolvedTarget !== resolvedRoot && !resolvedTarget.startsWith(resolvedRoot + path.sep)) { + throw new Error(`Refusing to use ${label} outside managed root ${resolvedTarget}`) + } + return resolvedTarget +} + /** * Resolve a colon-separated command name into a filesystem path. * e.g. 
resolveCommandPath("/commands", "ce:plan", ".md") -> "/commands/ce/plan.md" @@ -116,20 +646,14 @@ export async function copyDir(sourceDir: string, targetDir: string): Promise string, - transformAllMarkdown?: boolean, ): Promise { await ensureDir(targetDir) const entries = await fs.readdir(sourceDir, { withFileTypes: true }) @@ -139,12 +663,9 @@ export async function copySkillDir( const targetPath = path.join(targetDir, entry.name) if (entry.isDirectory()) { - await copySkillDir(sourcePath, targetPath, transformSkillContent, transformAllMarkdown) + await copySkillDir(sourcePath, targetPath, transformSkillContent) } else if (entry.isFile()) { - const shouldTransform = transformSkillContent && ( - entry.name === "SKILL.md" || (transformAllMarkdown && entry.name.endsWith(".md")) - ) - if (shouldTransform) { + if (entry.name === "SKILL.md" && transformSkillContent) { const content = await readText(sourcePath) await writeText(targetPath, transformSkillContent(content)) } else { diff --git a/src/utils/pi-layout.ts b/src/utils/pi-layout.ts new file mode 100644 index 000000000..d53b8a1f7 --- /dev/null +++ b/src/utils/pi-layout.ts @@ -0,0 +1,104 @@ +import { createHash } from "crypto" +import os from "os" +import path from "path" + +export const PI_MANAGED_MANIFEST_NAME = "compound-engineering-managed.json" +export const PI_MANAGED_VERIFICATION_DIR_NAME = "pi-managed" + +export type PiLayoutMode = "install" | "sync" + +export type PiLayout = { + root: string + skillsDir: string + promptsDir: string + extensionsDir: string + mcporterConfigPath: string + managedManifestPath: string + agentsPath: string + verificationPath: string +} + +export function canonicalizePiPath(targetPath: string): string { + const resolved = path.resolve(targetPath) + const normalized = resolved.replace(/[\\/]+$/, "") + return normalized || resolved +} + +export function samePiPath(left: string, right: string): boolean { + return canonicalizePiPath(left) === canonicalizePiPath(right) +} + +function 
createPiLayout(root: string, agentsPath: string): PiLayout { + const normalizedRoot = canonicalizePiPath(root) + const normalizedAgentsPath = canonicalizePiPath(agentsPath) + return { + root: normalizedRoot, + skillsDir: path.join(normalizedRoot, "skills"), + promptsDir: path.join(normalizedRoot, "prompts"), + extensionsDir: path.join(normalizedRoot, "extensions"), + mcporterConfigPath: path.join(normalizedRoot, "compound-engineering", "mcporter.json"), + managedManifestPath: path.join(normalizedRoot, "compound-engineering", PI_MANAGED_MANIFEST_NAME), + agentsPath: normalizedAgentsPath, + verificationPath: resolveVerificationPath( + normalizedRoot, + path.join(normalizedRoot, "compound-engineering", PI_MANAGED_MANIFEST_NAME), + ), + } +} + +function isDirectPiRoot(outputRoot: string): boolean { + const normalized = canonicalizePiPath(outputRoot) + const home = process.env.HOME || os.homedir() + const globalPiRoot = canonicalizePiPath(path.join(home, ".pi", "agent")) + + return normalized === globalPiRoot || path.basename(normalized) === ".pi" +} + +function resolveVerificationPath(root: string, managedManifestPath: string): string { + const stateHome = process.env.COMPOUND_ENGINEERING_HOME || os.homedir() + const identity = createHash("sha256") + .update(`${canonicalizePiPath(root)}:${canonicalizePiPath(managedManifestPath)}`) + .digest("hex") + return path.join(stateHome, ".compound-engineering", PI_MANAGED_VERIFICATION_DIR_NAME, `${identity}.json`) +} + +export function resolvePiLayout(outputRoot: string, mode: PiLayoutMode): PiLayout { + const normalizedOutputRoot = canonicalizePiPath(outputRoot) + + if (mode === "sync") { + return createPiLayout(normalizedOutputRoot, path.join(normalizedOutputRoot, "AGENTS.md")) + } + + if (isDirectPiRoot(normalizedOutputRoot)) { + return createPiLayout(normalizedOutputRoot, path.join(normalizedOutputRoot, "AGENTS.md")) + } + + const root = path.join(normalizedOutputRoot, ".pi") + return createPiLayout(root, 
path.join(normalizedOutputRoot, "AGENTS.md")) +} + +export function isPathWithinRoot(root: string, targetPath: string): boolean { + const resolvedRoot = canonicalizePiPath(root) + const resolvedTarget = canonicalizePiPath(targetPath) + return resolvedTarget === resolvedRoot || resolvedTarget.startsWith(resolvedRoot + path.sep) +} + +export function resolvePiProjectPathFromCwd(cwd: string, relativePath: string): string | undefined { + return getPiLayoutSearchPaths(cwd, relativePath)[0] +} + +export function getPiLayoutSearchPaths(cwd: string, relativePath: string): string[] { + const paths: string[] = [] + let current = path.resolve(cwd) + + while (true) { + paths.push(path.join(current, relativePath)) + paths.push(path.join(current, ".pi", relativePath)) + + const parent = path.dirname(current) + if (parent === current) break + current = parent + } + + return paths +} diff --git a/src/utils/pi-managed.ts b/src/utils/pi-managed.ts new file mode 100644 index 000000000..7a31d39a8 --- /dev/null +++ b/src/utils/pi-managed.ts @@ -0,0 +1,970 @@ +import type { Stats } from "fs" +import { promises as fs } from "fs" +import { createHash } from "crypto" +import os from "os" +import path from "path" +import type { + PiManagedArtifact, + PiManagedLegacyArtifact, + PiManagedManifest, + PiManagedSection, + PiManagedVerificationRecord, + PiManagedVerificationStatus, +} from "../types/pi" +import { captureTextFileSnapshot, ensureDir, isSafePathComponent, pathExists, readJson, readText, removeFileIfExists, restoreTextFileSnapshot, sanitizePathName, writeJsonIfChanged } from "./files" +import type { PiNameMaps } from "./pi-skills" +import { normalizePiSkillName } from "./pi-skills" +import type { PiLayout } from "./pi-layout" +import { canonicalizePiPath, isPathWithinRoot } from "./pi-layout" +import { getPiPolicyFingerprint } from "./pi-policy" + +type PiManagedArtifactKind = PiManagedArtifact["kind"] +type PiManagedSectionName = "install" | "sync" + +export type 
PiManagedStateSection = { + nameMaps: PiNameMaps + artifacts: PiManagedArtifact[] + mcpServers: string[] + sharedResources: { + compatExtension: boolean + mcporterConfig: boolean + } +} + +export type PiManagedState = { + version: 1 + pluginName?: string + policyFingerprint?: string + install: PiManagedStateSection + sync: PiManagedStateSection + nameMaps: PiNameMaps +} + +export type PiManagedStateWithTrust = { + status: PiManagedVerificationStatus + state: PiManagedState | null + verifiedSections: Record +} + +export type PiManagedTrustSectionName = PiManagedSectionName + +export type PiManagedTrustInfo = { + status: PiManagedVerificationStatus + state: PiManagedState | null + isVerified: boolean + verifiedSections: Record +} + +export type PiLegacyArtifactCandidate = { + expectedKind: "file" | "directory" + path: string +} + +export type PiLegacyCustomRootInstallCleanupPlan = { + artifactCandidates: PiLegacyArtifactCandidate[] + removeCompatExtension: boolean + pruneMcporterKeys: string[] + warnings: string[] +} + +export type PiManagedSectionHashable = { + nameMaps?: PiNameMaps + artifacts?: PiManagedArtifact[] + mcpServers?: string[] + sharedResources?: { + compatExtension?: boolean + mcporterConfig?: boolean + } +} + +const PI_MANAGED_VERIFICATION_VERSION = 1 + +function resolvePiManagedStateHome(): string { + return process.env.COMPOUND_ENGINEERING_HOME || os.homedir() +} + +function resolvePiManagedMachineKeyDir(): string { + return path.join(resolvePiManagedStateHome(), ".compound-engineering") +} + +function resolvePiManagedMachineKeyPath(): string { + return path.join(resolvePiManagedMachineKeyDir(), "pi-managed-key") +} + +type PiManagedLoadedVerificationRecord = PiManagedVerificationRecord & { + machineKey: string +} + +export async function loadPiManagedState(layout: PiLayout): Promise { + const trusted = await loadPiManagedStateWithTrust(layout) + return trusted.state +} + +export async function loadPiManagedStateWithTrust(layout: PiLayout): Promise 
{ + if (!(await pathExists(layout.managedManifestPath))) { + return { status: "missing", state: null, verifiedSections: { install: false, sync: false } } + } + + let raw: PiManagedManifest + try { + raw = await readJson(layout.managedManifestPath) + } catch { + return { status: "invalid", state: null, verifiedSections: { install: false, sync: false } } + } + + const install = normalizeSection( + raw.install, + filterLegacyNameMapsForSection(raw.nameMaps, "install"), + raw.installPrompts, + raw.generatedSkills, + layout, + "install", + ) + const sync = normalizeSection( + raw.sync, + filterLegacyNameMapsForSection(raw.nameMaps, "sync"), + raw.syncPrompts, + undefined, + layout, + "sync", + ) + + const state: PiManagedState = { + version: 1, + pluginName: raw.pluginName, + policyFingerprint: raw.policyFingerprint, + install, + sync, + nameMaps: mergeEffectiveNameMaps(install.nameMaps, sync.nameMaps), + } + + const verification = await loadVerificationRecord(layout) + if (!verification) return { status: "legacy", state, verifiedSections: { install: false, sync: false } } + + const currentRoot = path.resolve(layout.root) + const currentManifestPath = path.resolve(layout.managedManifestPath) + if (verification.root !== currentRoot || verification.manifestPath !== currentManifestPath) { + return { status: "stale", state, verifiedSections: { install: false, sync: false } } + } + + const currentPolicyFingerprint = getPiPolicyFingerprint() + if (state.policyFingerprint !== currentPolicyFingerprint || verification.policyFingerprint !== currentPolicyFingerprint) { + return { status: "stale", state, verifiedSections: { install: false, sync: false } } + } + + const installHash = hashManagedSection(layout, install) + const syncHash = hashManagedSection(layout, sync) + const installMatches = (!hasSectionData(install) && !verification.install) + || verification.install?.hash === signSectionHash(verification.machineKey, installHash) + const syncMatches = (!hasSectionData(sync) && 
!verification.sync) + || verification.sync?.hash === signSectionHash(verification.machineKey, syncHash) + + const verifiedSections = { + install: hasSectionData(install) ? installMatches : false, + sync: hasSectionData(sync) ? syncMatches : false, + } + + const hasUnverifiedSection = (hasSectionData(install) && !verifiedSections.install) + || (hasSectionData(sync) && !verifiedSections.sync) + if (!hasUnverifiedSection) { + return { status: "verified", state, verifiedSections } + } + + return { status: "stale", state, verifiedSections } +} + +export async function writePiManagedState( + layout: PiLayout, + state: PiManagedState, + verifiedSections: Partial> = { install: true, sync: true }, +): Promise { + const effectiveState = state.policyFingerprint + ? state + : { ...state, policyFingerprint: getPiPolicyFingerprint() } + if (!shouldWritePiManagedState(state)) { + const [existingManifest, existingVerification] = await Promise.all([ + readText(layout.managedManifestPath).catch(() => null), + readText(layout.verificationPath).catch(() => null), + ]) + if (existingManifest === null && existingVerification === null) { + return false + } + + const manifestSnapshot = await captureTextFileSnapshot(layout.managedManifestPath) + const verificationSnapshot = await captureTextFileSnapshot(layout.verificationPath) + try { + await removeFileIfExists(layout.managedManifestPath) + await removeFileIfExists(layout.verificationPath) + } catch (error) { + await restoreTextFileSnapshot(manifestSnapshot) + await restoreTextFileSnapshot(verificationSnapshot) + throw error + } + return true + } + + const manifest: PiManagedManifest = { + version: 1, + pluginName: effectiveState.pluginName, + policyFingerprint: effectiveState.policyFingerprint, + nameMaps: mergeEffectiveNameMaps(effectiveState.install.nameMaps, effectiveState.sync.nameMaps), + install: serializeSection(effectiveState.install), + sync: serializeSection(effectiveState.sync), + installPrompts: 
serializeLegacyArtifacts(layout, effectiveState.install.artifacts, "prompt"), + syncPrompts: serializeLegacyArtifacts(layout, effectiveState.sync.artifacts, "prompt"), + generatedSkills: serializeLegacyArtifacts(layout, effectiveState.install.artifacts, "generated-skill"), + } + + const verification = await createVerificationRecord(layout, effectiveState, verifiedSections) + const nextManifestContent = JSON.stringify(manifest, null, 2) + "\n" + const nextVerificationContent = JSON.stringify(verification, null, 2) + "\n" + const [existingManifest, existingVerification] = await Promise.all([ + readText(layout.managedManifestPath).catch(() => null), + readText(layout.verificationPath).catch(() => null), + ]) + if (existingManifest === nextManifestContent && existingVerification === nextVerificationContent) { + return false + } + + const manifestSnapshot = await captureTextFileSnapshot(layout.managedManifestPath) + const verificationSnapshot = await captureTextFileSnapshot(layout.verificationPath) + + try { + await writeJsonIfChanged(layout.managedManifestPath, manifest) + await writeJsonIfChanged(layout.verificationPath, verification) + } catch (error) { + await restoreTextFileSnapshot(manifestSnapshot) + await restoreTextFileSnapshot(verificationSnapshot) + throw error + } + return true +} + +export function createEmptyPiManagedState(pluginName?: string): PiManagedState { + return { + version: 1, + pluginName, + policyFingerprint: undefined, + install: createPiManagedSection(), + sync: createPiManagedSection(), + nameMaps: emptyPiNameMaps(), + } +} + +export function replacePiManagedSection( + state: PiManagedState | null, + sectionName: PiManagedSectionName, + nextSection: PiManagedStateSection, + pluginName?: string, +): PiManagedState { + const current = state ?? createEmptyPiManagedState(pluginName) + const nextState: PiManagedState = { + version: 1, + pluginName: pluginName ?? 
current.pluginName, + policyFingerprint: current.policyFingerprint, + install: sectionName === "install" ? nextSection : current.install, + sync: sectionName === "sync" ? nextSection : current.sync, + nameMaps: emptyPiNameMaps(), + } + nextState.nameMaps = mergeEffectiveNameMaps(nextState.install.nameMaps, nextState.sync.nameMaps) + return nextState +} + +export function createPiManagedSection(options?: { + nameMaps?: PiNameMaps + artifacts?: PiManagedArtifact[] + mcpServers?: string[] + sharedResources?: { + compatExtension?: boolean + mcporterConfig?: boolean + } +}): PiManagedStateSection { + return { + nameMaps: normalizeNameMaps(options?.nameMaps), + artifacts: [...(options?.artifacts ?? [])], + mcpServers: [...(options?.mcpServers ?? [])].filter(Boolean), + sharedResources: { + compatExtension: options?.sharedResources?.compatExtension ?? false, + mcporterConfig: options?.sharedResources?.mcporterConfig ?? false, + }, + } +} + +export function createManagedArtifact( + layout: PiLayout, + kind: PiManagedArtifactKind, + sourceName: string, + emittedName: string, +): PiManagedArtifact { + const relativePath = path.relative(layout.root, resolveArtifactPath(layout, kind, emittedName)) + return { kind, sourceName, emittedName, relativePath } +} + +export function resolveManagedArtifactPath(layout: PiLayout, artifact: PiManagedArtifact): string | null { + const relativePath = normalizeRelativeArtifactPath(artifact.relativePath) + if (!relativePath) return null + + const absolutePath = path.resolve(layout.root, relativePath) + if (!isPathWithinRoot(layout.root, absolutePath)) return null + if (!isArtifactPathExpected(layout, artifact.kind, artifact.emittedName, absolutePath)) return null + return absolutePath +} + +export function getReservedPiTargetNames(state: PiManagedState | null): { + prompts: Set + skills: Set + agents: Set +} { + const prompts = new Set() + const skills = new Set() + const agents = new Set() + + if (!state) return { prompts, skills, agents } + 
+ for (const section of [state.install, state.sync]) { + for (const value of Object.values(section.nameMaps.prompts ?? {})) prompts.add(value) + for (const value of Object.values(section.nameMaps.skills ?? {})) skills.add(value) + for (const value of Object.values(section.nameMaps.agents ?? {})) { + agents.add(value) + skills.add(value) + } + for (const artifact of section.artifacts) { + if (artifact.kind === "prompt") { + prompts.add(artifact.emittedName) + } else { + skills.add(artifact.emittedName) + } + } + } + + return { prompts, skills, agents } +} + +export async function removeStaleManagedArtifacts( + layout: PiLayout, + previous: PiManagedState | null, + next: PiManagedState, + removeFile: (filePath: string) => Promise, + removeDirectory: (dirPath: string) => Promise, +): Promise { + if (!previous) return + + const retainedPaths = new Set() + for (const section of [next.install, next.sync]) { + for (const artifact of section.artifacts) { + const artifactPath = resolveManagedArtifactPath(layout, artifact) + if (artifactPath) retainedPaths.add(artifactPath) + } + } + + for (const section of [previous.install, previous.sync]) { + for (const artifact of section.artifacts) { + const artifactPath = resolveManagedArtifactPath(layout, artifact) + if (!artifactPath || retainedPaths.has(artifactPath)) continue + + if (artifact.kind === "prompt") { + await removeFile(artifactPath) + } else { + await removeDirectory(artifactPath) + } + } + } +} + +export function collectLegacyArtifactCandidates( + layout: PiLayout, + artifacts: PiManagedArtifact[], + options?: { legacyRoot?: string }, +): PiLegacyArtifactCandidate[] { + const legacyRoot = path.resolve(options?.legacyRoot ?? 
layout.root) + const seen = new Set() + const candidates: PiLegacyArtifactCandidate[] = [] + + for (const artifact of artifacts) { + const canonicalPath = resolveManagedArtifactPath(layout, artifact) + const legacyNames = new Set([artifact.emittedName]) + const sourceLegacyName = resolveLegacyArtifactSourceName(artifact.sourceName) + if (sourceLegacyName) legacyNames.add(sourceLegacyName) + + for (const name of legacyNames) { + const legacyPath = path.resolve(resolveArtifactPathFromRoot(legacyRoot, artifact.kind, name)) + if (canonicalPath && path.resolve(canonicalPath) === legacyPath) continue + + const key = `${artifact.kind}:${legacyPath}` + if (seen.has(key)) continue + seen.add(key) + + candidates.push({ + expectedKind: artifact.kind === "prompt" ? "file" : "directory", + path: legacyPath, + }) + } + } + + return candidates +} + +export async function removeLegacyArtifactCandidates( + candidates: PiLegacyArtifactCandidate[], + removeFile: (filePath: string) => Promise, + removeDirectory: (dirPath: string) => Promise, +): Promise { + for (const candidate of candidates) { + let stats: Stats + try { + stats = await fs.lstat(candidate.path) + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") continue + throw error + } + + if (candidate.expectedKind === "file") { + if (!stats.isFile() || stats.isSymbolicLink()) { + console.warn(`Warning: found ambiguous legacy Pi artifact at ${candidate.path}; leaving it in place because ownership cannot be proven.`) + continue + } + await removeFile(candidate.path) + continue + } + + if (!stats.isDirectory() || stats.isSymbolicLink()) { + console.warn(`Warning: found ambiguous legacy Pi artifact at ${candidate.path}; leaving it in place because ownership cannot be proven.`) + continue + } + + await removeDirectory(candidate.path) + } +} + +export async function planLegacyCustomRootInstallCleanup(options: { + legacyLayout: PiLayout + legacyTrustInfo: PiManagedTrustInfo + artifactCandidates?: 
PiLegacyArtifactCandidate[] +}): Promise { + const { legacyLayout, legacyTrustInfo } = options + const warnings: string[] = [] + const artifactCandidates: PiLegacyArtifactCandidate[] = [] + const installCleanupVerified = legacyTrustInfo.verifiedSections.install + const syncCleanupVerified = legacyTrustInfo.verifiedSections.sync + const installSection = installCleanupVerified ? legacyTrustInfo.state?.install : undefined + const syncSection = syncCleanupVerified ? legacyTrustInfo.state?.sync : undefined + const installArtifactPaths = new Set() + const syncArtifactPaths = new Set() + + for (const artifact of installSection?.artifacts ?? []) { + const artifactPath = resolveManagedArtifactPath(legacyLayout, artifact) + if (artifactPath) installArtifactPaths.add(canonicalizePiPath(artifactPath)) + } + + for (const artifact of syncSection?.artifacts ?? []) { + const artifactPath = resolveManagedArtifactPath(legacyLayout, artifact) + if (artifactPath) syncArtifactPaths.add(canonicalizePiPath(artifactPath)) + } + + for (const candidate of options.artifactCandidates ?? 
[]) { + const resolvedPath = canonicalizePiPath(candidate.path) + if (!installCleanupVerified || !installArtifactPaths.has(resolvedPath)) { + if (await pathExists(resolvedPath)) { + warnings.push(`Warning: found ambiguous legacy Pi artifact at ${resolvedPath}; leaving it in place because install ownership cannot be proven.`) + } + continue + } + + if (syncCleanupVerified) { + if (syncArtifactPaths.has(resolvedPath)) continue + artifactCandidates.push({ ...candidate, path: resolvedPath }) + continue + } + + if (await pathExists(resolvedPath)) { + warnings.push(`Warning: found ambiguous legacy Pi artifact at ${resolvedPath}; leaving it in place because sync ownership cannot be proven.`) + } + } + + let removeCompatExtension = false + if (installSection?.sharedResources.compatExtension) { + const compatPath = path.join(legacyLayout.extensionsDir, "compound-engineering-compat.ts") + if (syncCleanupVerified) { + removeCompatExtension = syncSection?.sharedResources.compatExtension !== true + } else if (await pathExists(compatPath)) { + warnings.push(`Warning: found ambiguous legacy Pi shared resource at ${compatPath}; leaving it in place because sync ownership cannot be proven.`) + } + } + + let pruneMcporterKeys: string[] = [] + if ((installSection?.mcpServers.length ?? 0) > 0) { + if (syncCleanupVerified) { + const syncServers = new Set(syncSection?.mcpServers ?? []) + pruneMcporterKeys = installSection?.mcpServers.filter((server) => !syncServers.has(server)) ?? 
[] + } else if (await pathExists(legacyLayout.mcporterConfigPath)) { + warnings.push(`Warning: found ambiguous legacy mcporter.json at ${legacyLayout.mcporterConfigPath}; leaving it untouched because sync ownership cannot be proven.`) + } + } + + return { + artifactCandidates, + removeCompatExtension, + pruneMcporterKeys, + warnings, + } +} + +export async function getPiManagedTrustInfo(layout: PiLayout): Promise { + const trusted = await loadPiManagedStateWithTrust(layout) + return { + status: trusted.status, + state: trusted.state, + isVerified: trusted.status === "verified", + verifiedSections: trusted.verifiedSections, + } +} + +export function canUseTrustedNameMaps(info: PiManagedTrustInfo, sectionName: PiManagedTrustSectionName): boolean { + return Boolean(info.state && info.verifiedSections[sectionName] && hasSectionData(info.state[sectionName])) +} + +export function canUseVerifiedCleanup(info: PiManagedTrustInfo, sectionName: PiManagedTrustSectionName): boolean { + return info.verifiedSections[sectionName] || info.status === "verified" +} + +export function filterPiManagedStateForVerifiedSections( + state: PiManagedState | null, + verifiedSections: Partial>, +): PiManagedState | null { + if (!state) return null + + return { + ...state, + install: verifiedSections.install ? state.install : createPiManagedSection(), + sync: verifiedSections.sync ? state.sync : createPiManagedSection(), + nameMaps: mergeEffectiveNameMaps( + verifiedSections.install ? state.install.nameMaps : emptyPiNameMaps(), + verifiedSections.sync ? 
state.sync.nameMaps : emptyPiNameMaps(), + ), + } +} + +export function shouldWritePiManagedState(state: PiManagedState): boolean { + return hasSectionData(state.install) || hasSectionData(state.sync) +} + +export function isSafePiManagedName(name: string): boolean { + const trimmed = String(name || "").trim() + if (!trimmed) return false + if (trimmed.length > 64) return false + return /^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(trimmed) +} + +export function mergePiNameMaps(primary?: PiNameMaps, secondary?: PiNameMaps): PiNameMaps { + return { + agents: { ...(primary?.agents ?? {}), ...(secondary?.agents ?? {}) }, + skills: { ...(primary?.skills ?? {}), ...(secondary?.skills ?? {}) }, + prompts: { ...(primary?.prompts ?? {}), ...(secondary?.prompts ?? {}) }, + } +} + +export function createPiManagedSectionHashPayload(root: string, section?: PiManagedSectionHashable): { + root: string + nameMaps: PiNameMaps + artifacts: Array<{ + kind: PiManagedArtifact["kind"] | undefined + sourceName: string | undefined + emittedName: string | undefined + relativePath: string | null + }> + mcpServers: string[] + sharedResources: { + compatExtension: boolean + mcporterConfig: boolean + } +} { + return { + root: path.resolve(root), + nameMaps: normalizeNameMaps(section?.nameMaps), + artifacts: dedupeArtifacts([...(section?.artifacts ?? 
[])]).map((artifact) => ({ + kind: artifact.kind, + sourceName: artifact.sourceName, + emittedName: artifact.emittedName, + relativePath: normalizeRelativeArtifactPath(artifact.relativePath), + })), + mcpServers: normalizeMcpServers(section?.mcpServers), + sharedResources: normalizeSharedResources(section?.sharedResources), + } +} + +function normalizeSection( + rawSection: PiManagedSection | undefined, + legacyNameMaps: PiNameMaps | undefined, + legacyPrompts: PiManagedLegacyArtifact[] | undefined, + legacyGeneratedSkills: PiManagedLegacyArtifact[] | undefined, + layout: PiLayout, + owner: PiManagedSectionName, +): PiManagedStateSection { + const rawNameMaps = rawSection ? rawSection.nameMaps : legacyNameMaps + const artifacts: PiManagedArtifact[] = [] + + for (const artifact of rawSection?.artifacts ?? []) { + const normalized = normalizeArtifact(artifact, layout) + if (normalized) artifacts.push(normalized) + } + + if (owner === "install") { + for (const artifact of legacyPrompts ?? []) { + const normalized = normalizeLegacyArtifact(artifact, "prompt", layout) + if (normalized) artifacts.push(normalized) + } + for (const artifact of legacyGeneratedSkills ?? []) { + const normalized = normalizeLegacyArtifact(artifact, "generated-skill", layout) + if (normalized) artifacts.push(normalized) + } + } + + if (owner === "sync") { + for (const artifact of legacyPrompts ?? 
[]) { + const normalized = normalizeLegacyArtifact(artifact, "prompt", layout) + if (normalized) artifacts.push(normalized) + } + } + + return { + nameMaps: normalizeNameMaps(rawNameMaps), + artifacts: dedupeArtifacts(artifacts), + mcpServers: normalizeMcpServers(rawSection?.mcpServers), + sharedResources: { + compatExtension: rawSection?.sharedResources?.compatExtension === true, + mcporterConfig: rawSection?.sharedResources?.mcporterConfig === true, + }, + } +} + +function normalizeArtifact(artifact: PiManagedArtifact, layout: PiLayout): PiManagedArtifact | null { + if (!artifact || !artifact.kind || !artifact.sourceName || !isSafePiManagedName(artifact.emittedName)) { + return null + } + + const normalized: PiManagedArtifact = { + kind: artifact.kind, + sourceName: artifact.sourceName, + emittedName: artifact.emittedName, + relativePath: normalizeRelativeArtifactPath(artifact.relativePath) ?? "", + } + + return resolveManagedArtifactPath(layout, normalized) ? normalized : null +} + +function normalizeLegacyArtifact( + artifact: PiManagedLegacyArtifact, + kind: PiManagedArtifactKind, + layout: PiLayout, +): PiManagedArtifact | null { + if (!artifact?.sourceName || !artifact?.outputPath) return null + + const absolutePath = path.resolve(artifact.outputPath) + if (!isPathWithinRoot(layout.root, absolutePath)) return null + + const emittedName = kind === "prompt" + ? path.basename(absolutePath, path.extname(absolutePath)) + : path.basename(absolutePath) + + if (!isSafePiManagedName(emittedName)) return null + + const normalized: PiManagedArtifact = { + kind, + sourceName: artifact.sourceName, + emittedName, + relativePath: path.relative(layout.root, absolutePath), + } + + return resolveManagedArtifactPath(layout, normalized) ? 
normalized : null +} + +function normalizeNameMaps(nameMaps?: PiNameMaps): PiNameMaps { + return { + agents: normalizeNameMapEntries(nameMaps?.agents), + skills: normalizeNameMapEntries(nameMaps?.skills), + prompts: normalizeNameMapEntries(nameMaps?.prompts), + } +} + +function filterLegacyNameMapsForSection(nameMaps: PiNameMaps | undefined, owner: PiManagedSectionName): PiNameMaps { + const namespace = owner === "install" ? "compound-engineering:" : "claude-home:" + return { + agents: filterLegacyNameMapEntries(nameMaps?.agents, namespace), + skills: filterLegacyNameMapEntries(nameMaps?.skills, namespace), + prompts: filterLegacyNameMapEntries(nameMaps?.prompts, namespace), + } +} + +function filterLegacyNameMapEntries(entries: Record | undefined, namespace: string): Record { + const filtered: Record = {} + + for (const [alias, emittedName] of Object.entries(entries ?? {})) { + if (!alias.startsWith(namespace) || !isSafePiManagedName(emittedName)) continue + filtered[alias] = emittedName + } + + return filtered +} + +function normalizeNameMapEntries(entries?: Record): Record { + const normalized: Record = {} + + for (const [alias, emittedName] of Object.entries(entries ?? {})) { + if (!alias || !isSafePiManagedName(emittedName)) continue + normalized[alias] = emittedName + } + + return normalized +} + +function mergeEffectiveNameMaps(...maps: PiNameMaps[]): PiNameMaps { + return { + agents: mergeEffectiveNameMapEntries(...maps.map((map) => map.agents ?? {})), + skills: mergeEffectiveNameMapEntries(...maps.map((map) => map.skills ?? {})), + prompts: mergeEffectiveNameMapEntries(...maps.map((map) => map.prompts ?? 
{})),
  }
}

function mergeEffectiveNameMapEntries(...maps: Record<string, string>[]): Record<string, string> {
  // Merge alias -> emitted-name entries from several maps. An alias that maps
  // to two different emitted names is ambiguous: it is removed from the result
  // and blacklisted so no later map can silently reintroduce it.
  const merged: Record<string, string> = {}
  const ambiguous = new Set<string>()

  for (const entries of maps) {
    for (const [alias, emittedName] of Object.entries(entries)) {
      if (ambiguous.has(alias)) continue

      const current = merged[alias]
      if (!current) {
        merged[alias] = emittedName
      } else if (current !== emittedName) {
        delete merged[alias]
        ambiguous.add(alias)
      }
    }
  }

  return merged
}

function dedupeArtifacts(artifacts: PiManagedArtifact[]): PiManagedArtifact[] {
  // Collapse duplicates keyed on (kind, relativePath); the last entry wins.
  const byKey = new Map<string, PiManagedArtifact>()
  for (const artifact of artifacts) {
    byKey.set(`${artifact.kind}:${artifact.relativePath}`, artifact)
  }
  return [...byKey.values()]
}

function resolveArtifactPath(layout: PiLayout, kind: PiManagedArtifactKind, emittedName: string): string {
  // Convenience wrapper resolving against the layout's root directory.
  return resolveArtifactPathFromRoot(layout.root, kind, emittedName)
}

function resolveArtifactPathFromRoot(root: string, kind: PiManagedArtifactKind, emittedName: string): string {
  // Prompts are emitted as single markdown files under prompts/; every other
  // artifact kind is a directory under skills/.
  return kind === "prompt"
    ? path.join(root, "prompts", `${emittedName}.md`)
    : path.join(root, "skills", emittedName)
}

function isArtifactPathExpected(
  layout: PiLayout,
  kind: PiManagedArtifactKind,
  emittedName: string,
  absolutePath: string,
): boolean {
  // True exactly when the candidate path is where this artifact should live.
  const canonical = path.resolve(resolveArtifactPath(layout, kind, emittedName))
  return canonical === path.resolve(absolutePath)
}

function normalizeRelativeArtifactPath(relativePath: string): string | null {
  const trimmed = String(relativePath || "").trim()
  if (!trimmed || path.isAbsolute(trimmed)) return null

  const normalized = path.normalize(trimmed)
  if (normalized === ".."
|| normalized.startsWith(`..${path.sep}`)) return null
  return normalized
}

// Recover the legacy (pre-normalization) on-disk name for a source artifact,
// or null when the sanitized form is not a safe single path component.
function resolveLegacyArtifactSourceName(sourceName: string): string | null {
  const trimmed = String(sourceName || "").trim()
  if (!trimmed) return null

  const legacyName = sanitizePathName(trimmed)
  return isSafePathComponent(legacyName) ? legacyName : null
}

// Serialize a managed section for the manifest. Returns undefined when the
// section carries no data at all so empty sections are omitted entirely, and
// likewise omits each field that would serialize to an empty value.
function serializeSection(section: PiManagedStateSection): PiManagedSection | undefined {
  const nameMaps = normalizeNameMaps(section.nameMaps)
  const artifacts = dedupeArtifacts(section.artifacts)
  const mcpServers = normalizeMcpServers(section.mcpServers)
  const sharedResources = normalizeSharedResources(section.sharedResources)

  if (!hasNameMaps(nameMaps) && artifacts.length === 0 && mcpServers.length === 0 && !sharedResources.compatExtension && !sharedResources.mcporterConfig) return undefined
  return {
    nameMaps: hasNameMaps(nameMaps) ? nameMaps : undefined,
    artifacts: artifacts.length > 0 ? artifacts : undefined,
    mcpServers: mcpServers.length > 0 ? mcpServers : undefined,
    sharedResources: sharedResources.compatExtension || sharedResources.mcporterConfig ? sharedResources : undefined,
  }
}

// Project artifacts of one kind into the legacy manifest shape (sourceName +
// absolute outputPath). Artifacts whose path cannot be safely resolved under
// the layout root are dropped; undefined is returned when nothing remains.
function serializeLegacyArtifacts(
  layout: PiLayout,
  artifacts: PiManagedArtifact[],
  kind: PiManagedArtifactKind,
): PiManagedLegacyArtifact[] | undefined {
  const legacy = artifacts
    .filter((artifact) => artifact.kind === kind)
    .map((artifact) => {
      const outputPath = resolveManagedArtifactPath(layout, artifact)
      if (!outputPath) return null
      return {
        sourceName: artifact.sourceName,
        outputPath,
      }
    })
    .filter((artifact): artifact is PiManagedLegacyArtifact => artifact !== null)

  return legacy.length > 0 ? legacy : undefined
}

// True when any of the three name maps has at least one entry.
function hasNameMaps(nameMaps?: PiNameMaps): boolean {
  if (!nameMaps) return false
  return Boolean(
    Object.keys(nameMaps.agents ?? {}).length
    || Object.keys(nameMaps.skills ?? {}).length
    || Object.keys(nameMaps.prompts ??
{}).length,
  )
}

// True when a managed section records anything at all: name maps, artifacts,
// MCP servers, or ownership of a shared resource.
function hasSectionData(section: PiManagedStateSection): boolean {
  return hasNameMaps(section.nameMaps)
    || section.artifacts.length > 0
    || section.mcpServers.length > 0
    || section.sharedResources.compatExtension
    || section.sharedResources.mcporterConfig
}

// Trim, drop empties, dedupe, and sort so the server list is canonical and
// order-independent (important because it feeds the section hash).
function normalizeMcpServers(servers?: string[]): string[] {
  return [...new Set((servers ?? []).map((server) => String(server || "").trim()).filter(Boolean))].sort()
}

// Coerce shared-resource flags to strict booleans (anything but `true` is false).
function normalizeSharedResources(resources?: { compatExtension?: boolean; mcporterConfig?: boolean }) {
  return {
    compatExtension: resources?.compatExtension === true,
    mcporterConfig: resources?.mcporterConfig === true,
  }
}

function emptyPiNameMaps(): PiNameMaps {
  return {
    agents: {},
    skills: {},
    prompts: {},
  }
}

// Normalize an arbitrary source name into a Pi-safe managed name, falling
// back to the generic "item" if normalization still fails the safety check.
export function resolveSafePiName(sourceName: string): string {
  const normalized = normalizePiSkillName(sourceName)
  return isSafePiManagedName(normalized) ? normalized : "item"
}

// Load the on-disk verification record, returning null ("unverified") when
// the file is missing, unreadable, the wrong version, or its section hashes
// were signed with a different machine key than this machine's.
async function loadVerificationRecord(layout: PiLayout): Promise {
  if (!(await pathExists(layout.verificationPath))) return null

  try {
    const machineKey = await readMachineKey()
    const verification = await readJson(layout.verificationPath)
    if (verification.version !== PI_MANAGED_VERIFICATION_VERSION) return null

    // Section hashes are prefixed with the signing machine key; a mismatched
    // prefix means the record came from another machine and is not trusted.
    const scopedInstallHash = verification.install?.hash
    const scopedSyncHash = verification.sync?.hash
    if ((scopedInstallHash && !scopedInstallHash.startsWith(machineKey + ":")) || (scopedSyncHash && !scopedSyncHash.startsWith(machineKey + ":"))) {
      return null
    }

    return { ...verification, machineKey }
  } catch {
    return null
  }
}

// Build a fresh verification record for the given state; a section hash is
// only included when the section is both verified and non-empty.
async function createVerificationRecord(
  layout: PiLayout,
  state: PiManagedState,
  verifiedSections: Partial>,
): Promise {
  const machineKey = await readMachineKey()
  return {
    version: PI_MANAGED_VERIFICATION_VERSION,
    root: path.resolve(layout.root),
    manifestPath: path.resolve(layout.managedManifestPath),
    policyFingerprint: state.policyFingerprint,
    // Only hash sections that are verified and actually carry data; an empty
    // or explicitly-unverified section gets no signed hash at all.
    install: verifiedSections.install !== false && hasSectionData(state.install)
      ? { hash: signSectionHash(machineKey, hashManagedSection(layout, state.install)) }
      : undefined,
    sync: verifiedSections.sync !== false && hasSectionData(state.sync)
      ? { hash: signSectionHash(machineKey, hashManagedSection(layout, state.sync)) }
      : undefined,
  }
}

// Return this machine's persistent random key, creating it on first use.
// Creation uses an exclusive open ("wx") so concurrent processes race safely:
// the loser sees EEXIST and re-reads the winner's key instead of overwriting.
async function readMachineKey(): Promise {
  const machineKeyPath = resolvePiManagedMachineKeyPath()
  await ensureDir(resolvePiManagedMachineKeyDir())
  const existing = await readPersistedMachineKey(machineKeyPath)
  if (existing) return existing

  // hostname/pid/time/random feed a sha256 to make an opaque identifier;
  // NOTE(review): not a cryptographic secret — used only to scope hashes.
  const key = createHash("sha256").update(`${os.hostname()}:${process.pid}:${Date.now()}:${Math.random()}`).digest("hex")
  try {
    const handle = await fs.open(machineKeyPath, "wx", 0o600)
    try {
      await handle.writeFile(key + "\n", { encoding: "utf8" })
      await handle.chmod(0o600) // pin 0600 regardless of the process umask
    } finally {
      await handle.close()
    }
  } catch (error) {
    // EEXIST: another process won the creation race; fall through and read
    // whatever it wrote. Any other error is a real failure.
    if ((error as NodeJS.ErrnoException).code !== "EEXIST") {
      throw error
    }
  }

  const persisted = await readPersistedMachineKey(machineKeyPath)
  if (!persisted) {
    throw new Error(`Invalid Pi managed machine key at ${machineKeyPath}`)
  }
  return persisted
}

// Read the persisted machine key, retrying briefly (5 x 5ms) to tolerate a
// concurrent creator that has opened the file but not yet written the key.
async function readPersistedMachineKey(machineKeyPath: string): Promise {
  if (!(await pathExists(machineKeyPath))) {
    return null
  }

  for (let attempt = 0; attempt < 5; attempt += 1) {
    const existing = (await readText(machineKeyPath)).trim()
    if (existing) {
      return existing
    }

    await new Promise((resolve) => setTimeout(resolve, 5))
  }

  throw new Error(`Invalid Pi managed machine key at ${machineKeyPath}`)
}

// Deterministic sha256 of a section's canonical JSON payload (rooted at the
// layout root); this is the unsigned half of a verification hash.
function hashManagedSection(layout: PiLayout, section: PiManagedStateSection): string {
  const payload = JSON.stringify(createPiManagedSectionHashPayload(layout.root, section))

  return createHash("sha256").update(payload).digest("hex")
}

function signSectionHash(machineKey: string,
hash: string): string { + return `${machineKey}:${hash}` +} diff --git a/src/utils/pi-policy.ts b/src/utils/pi-policy.ts new file mode 100644 index 000000000..b9532c3b8 --- /dev/null +++ b/src/utils/pi-policy.ts @@ -0,0 +1,17 @@ +import { createHash } from "crypto" + +const PI_FOREIGN_TASK_POLICY_VERSION = "foreign-qualified-default-deny-v1" +const PI_POLICY_FINGERPRINT_ENV = "COMPOUND_ENGINEERING_PI_POLICY_FINGERPRINT" + +let piPolicyFingerprintOverride: string | null = null + +export function getPiPolicyFingerprint(): string { + if (piPolicyFingerprintOverride) return piPolicyFingerprintOverride + const envOverride = process.env[PI_POLICY_FINGERPRINT_ENV]?.trim() + if (envOverride) return envOverride + return createHash("sha256").update(PI_FOREIGN_TASK_POLICY_VERSION).digest("hex") +} + +export function setPiPolicyFingerprintForTests(fingerprint: string | null): void { + piPolicyFingerprintOverride = fingerprint +} diff --git a/src/utils/pi-skills.ts b/src/utils/pi-skills.ts new file mode 100644 index 000000000..001f31e82 --- /dev/null +++ b/src/utils/pi-skills.ts @@ -0,0 +1,1557 @@ +import { createHash } from "crypto" +import type { Dirent } from "fs" +import { promises as fs } from "fs" +import os from "os" +import path from "path" +import { dump } from "js-yaml" +import { assertNoSymlinkAncestors, ensureManagedDir, ensureManagedParentDir, pathExists, readText, writeFileAtomicIfChanged, writeTextAtomicIfChanged } from "./files" +import { parseFrontmatter } from "./frontmatter" +import { getPiPolicyFingerprint } from "./pi-policy" + +export const PI_CE_SUBAGENT_TOOL = "ce_subagent" + +export type PiNameMaps = { + agents?: Record + skills?: Record + prompts?: Record +} + +export type PiMaterializationOptions = { + trustedRoot?: string +} + +export type PiTransformOptions = { + preserveUnknownQualifiedRefs?: boolean + rejectUnknownQualifiedTaskRefs?: boolean + preserveUnresolvedFirstPartyQualifiedSkillRefs?: boolean + rejectUnresolvedFirstPartyQualifiedRefs?: 
boolean +} + +export type PiSkillMutationHooks = { + onBeforeMutate?: (mode: "incremental" | "replace") => void | Promise +} + +type PiSkillFullCompareHook = (targetDir: string) => void | Promise +type PiSkillSourceFingerprintHook = (sourceDir: string) => void | Promise +type PiSkillSourceAnalysisHook = (sourceDir: string) => void | Promise + +let piSkillFullCompareHook: PiSkillFullCompareHook | null = null +let piSkillSourceFingerprintHook: PiSkillSourceFingerprintHook | null = null +let piSkillSourceAnalysisHook: PiSkillSourceAnalysisHook | null = null + +export function setPiSkillFullCompareHookForTests(hook: PiSkillFullCompareHook | null): void { + piSkillFullCompareHook = hook +} + +export function setPiSkillSourceFingerprintHookForTests(hook: PiSkillSourceFingerprintHook | null): void { + piSkillSourceFingerprintHook = hook +} + +export function setPiSkillSourceAnalysisHookForTests(hook: PiSkillSourceAnalysisHook | null): void { + piSkillSourceAnalysisHook = hook +} + +const PI_MAX_NAME_LENGTH = 60 // Pi allows 64; leave room for dedup suffix like -2 +const PI_MANAGED_NAME_LIMIT = 64 + +export function normalizePiSkillName(value: string): string { + const trimmed = value.trim() + if (!trimmed) return "item" + + const normalized = trimmed + .toLowerCase() + .replace(/[\\/]+/g, "-") + .replace(/[:_\s]+/g, "-") + .replace(/[^a-z0-9-]+/g, "-") + .replace(/-+/g, "-") + .replace(/^-+|-+$/g, "") + .slice(0, PI_MAX_NAME_LENGTH) + .replace(/-+$/, "") + + return normalized || "item" +} + +export function uniquePiSkillName(base: string, used: Set): string { + if (!used.has(base)) { + used.add(base) + return base + } + + let index = 2 + while (true) { + const suffix = `-${index}` + const trimmedBase = base.slice(0, Math.max(1, PI_MANAGED_NAME_LIMIT - suffix.length)).replace(/-+$/, "") || "item" + const candidate = `${trimmedBase}${suffix}` + if (!used.has(candidate)) { + used.add(candidate) + return candidate + } + index += 1 + } +} + +export function 
buildPiSameRunQualifiedNameMap( + activeNameMap: Record, + namespace = "claude-home", +): Record { + const qualifiedNameMap: Record = {} + + for (const [sourceName, emittedName] of Object.entries(activeNameMap)) { + if (!sourceName || sourceName.startsWith(`${namespace}:`)) continue + qualifiedNameMap[`${namespace}:${sourceName}`] = emittedName + } + + return qualifiedNameMap +} + +export function collectPiSameRunDependencies(content: string): { + skills: string[] + prompts: string[] +} { + const skills = new Set() + const prompts = new Set() + const text = String(content || "") + + for (const match of text.matchAll(/\/skill:claude-home:([^\s)]+)/g)) { + if (match[1]) skills.add(match[1]) + } + for (const match of text.matchAll(/Task\s+claude-home:([^\s(]+)\s*\(/g)) { + if (match[1]) skills.add(match[1]) + } + for (const match of text.matchAll(/\/(?:prompt|prompts):claude-home:([^\s)]+)/g)) { + if (match[1]) prompts.add(match[1]) + } + + return { + skills: [...skills].sort(), + prompts: [...prompts].sort(), + } +} + +export function transformPiBodyContent(body: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { + const lineBreak = body.includes("\r\n") ? 
"\r\n" : "\n" + const lines = body.split(/\r?\n/) + const transformed: string[] = [] + let activeFence: { char: "`" | "~"; length: number } | null = null + let inIndentedCodeBlock = false + let previousBlankLine = true + let inBlockquote = false + + for (const line of lines) { + const fence = readMarkdownFence(line) + const blankLine = line.trim().length === 0 + + if (activeFence) { + transformed.push(line) + if (fence && fence.char === activeFence.char && fence.length >= activeFence.length) { + activeFence = null + } + continue + } + + if (inIndentedCodeBlock) { + if (blankLine) { + transformed.push(line) + previousBlankLine = true + continue + } + + if (isIndentedCodeBlockLine(line)) { + transformed.push(line) + previousBlankLine = false + continue + } + + inIndentedCodeBlock = false + } + + if (fence) { + activeFence = fence + transformed.push(line) + previousBlankLine = false + continue + } + + if (inBlockquote) { + if (blankLine) { + inBlockquote = false + transformed.push(line) + previousBlankLine = true + continue + } + + if (/^\s*>/.test(line) || !isMarkdownBlockStarter(line)) { + transformed.push(line) + previousBlankLine = false + continue + } + + inBlockquote = false + } + + if (/^\s*>/.test(line)) { + inBlockquote = true + transformed.push(line) + previousBlankLine = false + continue + } + + if (previousBlankLine && isIndentedCodeBlockLine(line) && !isIndentedTaskBulletLine(line)) { + inIndentedCodeBlock = true + transformed.push(line) + previousBlankLine = false + continue + } + + transformed.push(transformPiMarkdownLine(line, nameMaps, options)) + previousBlankLine = blankLine + } + + return transformed.join(lineBreak) +} + +export { appendCompatibilityNoteIfNeeded } + +export async function skillFileMatchesPiTarget( + skillPath: string, + targetName: string, + nameMaps?: PiNameMaps, + options?: PiTransformOptions, +): Promise { + if (!(await pathExists(skillPath))) { + return false + } + + const raw = await readText(skillPath) + + try { + const 
parsed = parseFrontmatter(raw) + if (Object.keys(parsed.data).length === 0 && parsed.body === raw) { + return transformPiBodyContent(raw, nameMaps, options) === raw + } + + if (parsed.data.name !== targetName) { + return false + } + + return transformPiBodyContent(parsed.body, nameMaps, options) === parsed.body + } catch (error) { + console.warn(`Pi sync: failed to parse frontmatter in ${skillPath}:`, (error as Error).message) + const rewritten = renderPiSkillContent(raw, targetName, nameMaps, skillPath, options) + return rewritten === raw + } +} + +export async function piSkillTargetMatchesMaterializedSource( + sourceDir: string, + targetDir: string, + targetName: string, + nameMaps?: PiNameMaps, + options?: PiMaterializationOptions, + transformOptions?: PiTransformOptions, +): Promise { + const targetStats = await fs.lstat(targetDir).catch(() => null) + if (!targetStats || targetStats.isSymbolicLink() || !targetStats.isDirectory()) { + return false + } + + return materializedDirMatches(sourceDir, targetDir, targetName, nameMaps, new Set(), options, transformOptions) +} + +async function materializedDirMatches( + sourceDir: string, + targetDir: string, + targetName: string, + nameMaps?: PiNameMaps, + activeRealDirs = new Set(), + options?: PiMaterializationOptions, + transformOptions?: PiTransformOptions, +): Promise { + const realSourceDir = await fs.realpath(sourceDir) + if (activeRealDirs.has(realSourceDir)) { + throw cyclicPiSkillSymlinkError(sourceDir) + } + + activeRealDirs.add(realSourceDir) + + try { + const [sourceEntries, targetEntries] = await Promise.all([ + fs.readdir(sourceDir, { withFileTypes: true }), + fs.readdir(targetDir, { withFileTypes: true }), + ]) + + const comparableSourceEntries = (await Promise.all( + sourceEntries.map(async (entry) => resolvePiMaterializedEntry(path.join(sourceDir, entry.name), entry, undefined, options)), + )).filter((entry): entry is PiMaterializedEntry => entry !== null && entry.kind !== "skip") + + const sourceNames 
/**
 * Clears the way for a full rewrite of a managed Pi skill directory.
 *
 * - A symlinked target is unlinked (never followed). The symlink status is
 *   re-checked after the ancestor assertion; if the entry stopped being a
 *   symlink in the meantime we refuse rather than guess what replaced it
 *   (TOCTOU guard).
 * - A real directory is renamed aside to a timestamped `.bak.` sibling.
 *   All but the newest pre-existing backup are pruned first, so at most two
 *   backups (the newest old one plus the fresh one) remain afterwards.
 */
export async function preparePiSkillTargetForReplacement(targetDir: string): Promise<void> {
  await assertNoSymlinkAncestors(targetDir)
  const existingStats = await fs.lstat(targetDir).catch(() => null)
  if (!existingStats) return

  if (existingStats.isSymbolicLink()) {
    // Re-validate immediately before the unlink.
    await assertNoSymlinkAncestors(targetDir)
    const rechecked = await fs.lstat(targetDir).catch(() => null)
    if (!rechecked) return
    if (!rechecked.isSymbolicLink()) {
      throw new Error(`Refusing to replace unexpected Pi skill path ${targetDir}`)
    }
    await fs.unlink(targetDir)
    return
  }

  const parentDir = path.dirname(targetDir)
  const baseName = path.basename(targetDir)
  const existingBackups = (await fs.readdir(parentDir))
    .filter((entry) => entry.startsWith(`${baseName}.bak.`))

  // Lexicographic sort works because backup suffixes are ISO timestamps;
  // everything except the newest existing backup is removed. Symlinked
  // "backups" are deliberately left untouched.
  for (const oldBackup of existingBackups.sort().slice(0, -1)) {
    const backupPath = path.join(parentDir, oldBackup)
    await assertNoSymlinkAncestors(backupPath)
    const backupStats = await fs.lstat(backupPath)
    if (backupStats.isSymbolicLink()) continue
    await fs.rm(backupPath, { recursive: true, force: true })
  }

  // ISO timestamp with ':' and '.' replaced so the name is filesystem-safe.
  const backupPath = `${targetDir}.bak.${new Date().toISOString().replace(/[:.]/g, "-")}`
  await assertNoSymlinkAncestors(targetDir)
  await fs.rename(targetDir, backupPath)
  console.warn(`Backed up existing Pi skill directory to ${backupPath}`)
}
hooks?.onBeforeMutate?.("replace") + await preparePiSkillTargetForReplacement(targetDir) + await copyDirForPiMaterialization(sourceDir, targetDir, new Set(), options) + await rewriteSkillFileForPi(path.join(targetDir, "SKILL.md"), targetName, nameMaps, transformOptions) +} + +async function validatePiSkillSourceForPi( + sourceDir: string, + targetName: string, + nameMaps?: PiNameMaps, + options?: PiTransformOptions, +): Promise { + const skillPath = path.join(sourceDir, "SKILL.md") + if (!(await pathExists(skillPath))) { + return + } + + const raw = await readText(skillPath) + void renderPiSkillContent(raw, targetName, nameMaps, skillPath, options) +} + +function cyclicPiSkillSymlinkError(sourcePath: string): Error { + return new Error(`Pi skill materialization detected a cyclic directory symlink at ${sourcePath}`) +} + +async function copyDirForPiMaterialization( + sourceDir: string, + targetDir: string, + activeRealDirs = new Set(), + options?: PiMaterializationOptions, +): Promise { + const realSourceDir = await fs.realpath(sourceDir) + if (activeRealDirs.has(realSourceDir)) { + throw cyclicPiSkillSymlinkError(sourceDir) + } + + activeRealDirs.add(realSourceDir) + + try { + await assertNoSymlinkAncestors(targetDir) + await fs.mkdir(targetDir, { recursive: true }) + const entries = await fs.readdir(sourceDir, { withFileTypes: true }) + + for (const entry of entries) { + const sourcePath = path.join(sourceDir, entry.name) + const targetPath = path.join(targetDir, entry.name) + const resolvedEntry = await resolvePiMaterializedEntry(sourcePath, entry, { logSkippedDanglingSymlinks: true }, options) + + if (!resolvedEntry || resolvedEntry.kind === "skip") { + continue + } + + const materializedSourcePath = resolvedEntry.sourcePath + + if (resolvedEntry.kind === "directory") { + await copyDirForPiMaterialization(materializedSourcePath, targetPath, activeRealDirs, options) + continue + } + + if (resolvedEntry.kind === "file") { + await fs.mkdir(path.dirname(targetPath), 
{ recursive: true }) + await writeFileAtomicIfChanged({ + filePath: targetPath, + content: await fs.readFile(materializedSourcePath), + }) + continue + } + } + } finally { + activeRealDirs.delete(realSourceDir) + } +} + +export async function rewriteSkillFileForPi( + skillPath: string, + targetName: string, + nameMaps?: PiNameMaps, + options?: PiTransformOptions, +): Promise { + if (!(await pathExists(skillPath))) { + return + } + + const raw = await readText(skillPath) + const updated = renderPiSkillContent(raw, targetName, nameMaps, skillPath, options) + + if (updated !== raw) { + await writeTextAtomicIfChanged({ filePath: skillPath, content: updated }) + } +} + +function renderPiSkillContent( + raw: string, + targetName: string, + nameMaps?: PiNameMaps, + sourceLabel?: string, + options?: PiTransformOptions, +): string { + try { + const parsed = parseFrontmatter(raw) + if (Object.keys(parsed.data).length === 0 && parsed.body === raw) { + return transformPiBodyContent(raw, nameMaps, options) + } + + return formatPiFrontmatter( + { ...parsed.data, name: targetName }, + transformPiBodyContent(parsed.body, nameMaps, options), + ) + } catch (error) { + console.warn(`Pi sync: failed to parse frontmatter in ${sourceLabel ?? ""}:`, (error as Error).message) + const split = splitRawAtFrontmatterEnd(raw) + const body = split ? 
split.body : raw + const rewrittenBody = transformPiBodyContent(body, nameMaps, options) + return formatPiFrontmatter({ name: targetName }, rewrittenBody) + } +} + +async function materializedSkillFileMatches( + sourcePath: string, + targetPath: string, + targetName: string, + nameMaps?: PiNameMaps, + options?: PiTransformOptions, +): Promise { + const [sourceRaw, targetRaw] = await Promise.all([readText(sourcePath), readText(targetPath)]) + return renderPiSkillContent(sourceRaw, targetName, nameMaps, sourcePath, options) === targetRaw +} + +async function fileContentsMatch(sourcePath: string, targetPath: string): Promise { + const [sourceBuffer, targetBuffer] = await Promise.all([ + fs.readFile(sourcePath), + fs.readFile(targetPath), + ]) + + return sourceBuffer.equals(targetBuffer) +} + +function formatPiFrontmatter(data: Record, body: string): string { + const yaml = dump(data, { lineWidth: -1, noRefs: true }).trimEnd() + if (yaml.length === 0) { + return body + } + + return ["---", yaml, "---", "", body].join("\n") +} + +function splitRawAtFrontmatterEnd(raw: string): { frontmatter: string; body: string } | null { + const lines = raw.split(/\r?\n/) + if (lines[0]?.trim() !== "---") return null + for (let i = 1; i < lines.length; i++) { + if (lines[i].trim() === "---") { + return { + frontmatter: lines.slice(0, i + 1).join("\n") + "\n", + body: lines.slice(i + 1).join("\n"), + } + } + } + return null +} + +function normalizePiTaskAgentName(value: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { + return resolvePiMappedName(value, { + primary: nameMaps?.agents, + secondary: nameMaps?.skills, + fallback: "leaf", + preserveUnknownQualifiedRefs: options?.preserveUnknownQualifiedRefs, + unresolvedFirstPartyQualifiedPolicy: options?.rejectUnresolvedFirstPartyQualifiedRefs ? 
"reject" : undefined, + }) +} + +function normalizePiSkillReferenceName(value: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { + return resolvePiMappedName(value, { + primary: nameMaps?.skills, + secondary: nameMaps?.agents, + fallback: "full", + preserveUnknownQualifiedRefs: options?.preserveUnknownQualifiedRefs, + unresolvedFirstPartyQualifiedPolicy: options?.preserveUnresolvedFirstPartyQualifiedSkillRefs === false ? "reject" : "preserve", + }) +} + +function normalizePiPromptReferenceName(value: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { + const trimmed = value.trim() + const rootNamespace = trimmed.split(":").filter(Boolean)[0] ?? "" + const isFirstPartyQualified = trimmed.includes(":") && ["compound-engineering", "claude-home"].includes(rootNamespace) + const leafName = trimmed.split(":").filter(Boolean).pop() ?? trimmed + if (isFirstPartyQualified && options?.rejectUnresolvedFirstPartyQualifiedRefs) { + if (!nameMaps?.prompts?.[trimmed] && !nameMaps?.prompts?.[leafName]) { + throw new Error(`Unsupported unresolved first-party qualified ref for Pi sync: ${trimmed}`) + } + } + + return resolvePiMappedName(value, { + primary: nameMaps?.prompts, + fallback: "full", + preserveUnknownQualifiedRefs: options?.preserveUnknownQualifiedRefs, + unresolvedFirstPartyQualifiedPolicy: options?.rejectUnresolvedFirstPartyQualifiedRefs ? "reject" : undefined, + }) +} + +function resolvePiMappedName( + value: string, + options: { + primary?: Record + secondary?: Record + fallback: "full" | "leaf" + preserveUnknownQualifiedRefs?: boolean + unresolvedFirstPartyQualifiedPolicy?: "preserve" | "reject" + }, +): string { + const trimmed = value.trim() + const leafName = trimmed.split(":").filter(Boolean).pop() ?? trimmed + const isQualified = trimmed.includes(":") + const rootNamespace = trimmed.split(":").filter(Boolean)[0] ?? 
"" + + const exactPrimary = options.primary?.[trimmed] + if (exactPrimary) return exactPrimary + + const exactSecondary = options.secondary?.[trimmed] + if (exactSecondary) return exactSecondary + + if ( + options.preserveUnknownQualifiedRefs + && isQualified + && !["compound-engineering", "claude-home"].includes(rootNamespace) + ) { + return trimmed + } + + const leafPrimary = options.primary?.[leafName] + const isFirstPartyQualified = isQualified && ["compound-engineering", "claude-home"].includes(rootNamespace) + if (isFirstPartyQualified && leafPrimary && options.unresolvedFirstPartyQualifiedPolicy === "preserve") { + return trimmed + } + if (isFirstPartyQualified && leafPrimary && options.unresolvedFirstPartyQualifiedPolicy === "reject") { + throw new Error(`Unsupported unresolved first-party qualified ref for Pi sync: ${trimmed}`) + } + if (leafPrimary) return leafPrimary + + const leafSecondary = options.secondary?.[leafName] + if (isFirstPartyQualified && leafSecondary && options.unresolvedFirstPartyQualifiedPolicy === "preserve") { + return trimmed + } + if (isFirstPartyQualified && leafSecondary && options.unresolvedFirstPartyQualifiedPolicy === "reject") { + throw new Error(`Unsupported unresolved first-party qualified ref for Pi sync: ${trimmed}`) + } + if (leafSecondary) return leafSecondary + + if (isFirstPartyQualified && rootNamespace === "claude-home" && options.unresolvedFirstPartyQualifiedPolicy === "preserve") { + return trimmed + } + + return options.fallback === "full" + ? 
normalizePiSkillName(trimmed) + : normalizePiSkillName(leafName) +} + +const PI_MCPORTER_SENTINEL = "" + +function appendCompatibilityNoteIfNeeded(body: string): string { + if (!/\bmcp\b/i.test(body)) return body + if (body.includes(PI_MCPORTER_SENTINEL)) return body + + const note = [ + "", + PI_MCPORTER_SENTINEL, + "## Pi + MCPorter note", + "For MCP access in Pi, use MCPorter via the generated tools:", + "- `mcporter_list` to inspect available MCP tools", + "- `mcporter_call` to invoke a tool", + "", + ].join("\n") + + return body + note +} + +type PiMaterializedEntry = { + kind: "directory" | "file" + name: string + sourcePath: string +} + +type PiMaterializedTreeNode = + | { + kind: "directory" + children: Map + } + | { + kind: "file" + sourcePath: string + renderedContent?: string + } + +type PiMaterializedTreeAnalysis = { + tree: PiMaterializedTreeNode + fingerprint: string + metadataSignature: string +} + +type PiMaterializedMetadataNode = { + kind: "directory" | "file" + name: string + sourcePath: string + metadataSignature?: string + children?: PiMaterializedMetadataNode[] +} + +type PiMaterializedMetadataSummary = { + metadataSignature: string + root: PiMaterializedMetadataNode +} + +type PiTargetTreeNode = { + kind: "directory" | "file" | "symlink" | "other" + children?: Map +} + +type PiTargetTreeAnalysis = { + tree: PiTargetTreeNode + metadataSignature: string +} + +type PiIncrementalOp = + | { + type: "createDir" + relativePath: string + } + | { + type: "writeFile" + relativePath: string + sourcePath?: string + renderedContent?: string + } + | { + type: "remove" + relativePath: string + targetKind: "directory" | "file" + } + +type PiMutationSnapshot = { + targetPath: string + existed: boolean + kind?: "directory" | "file" + tempPath?: string + mode?: number +} + +type PiSkillFastPathRecord = { + version: 3 + policyFingerprint: string + renderSignature: string + sourceMetadataSignature: string + sourceFingerprint: string + targetMetadataSignature: 
string +} + +async function planPiSkillDirUpdate( + sourceDir: string, + targetDir: string, + targetName: string, + nameMaps?: PiNameMaps, + options?: PiMaterializationOptions, + transformOptions?: PiTransformOptions, +): Promise< + | { result: "nochange"; renderSignature: string; sourceMetadataSignature: string; sourceFingerprint: string; targetMetadataSignature: string } + | { result: "apply"; ops: PiIncrementalOp[]; renderSignature: string; sourceMetadataSignature: string; sourceFingerprint: string } + | { result: "fallback" } +> { + const targetStats = await fs.lstat(targetDir).catch(() => null) + if (!targetStats) { + return { result: "fallback" } + } + + if (targetStats.isSymbolicLink() || !targetStats.isDirectory()) { + return { result: "fallback" } + } + + const sourceMetadataSummary = await buildPiMaterializedTreeMetadataSummary(sourceDir, options) + const sourceMetadataSignature = sourceMetadataSummary.metadataSignature + const renderSignature = buildPiSkillRenderSignature(targetName, nameMaps, transformOptions) + const targetAnalysis = await buildPiTargetTree(targetDir) + const targetMetadataSignature = targetAnalysis.metadataSignature + const cachedFastPath = await readPiSkillFastPathRecord(targetDir) + if (cachedFastPath + && cachedFastPath.policyFingerprint === getPiPolicyFingerprint() + && cachedFastPath.renderSignature === renderSignature + && cachedFastPath.sourceMetadataSignature === sourceMetadataSignature + && cachedFastPath.targetMetadataSignature === targetMetadataSignature) { + return { result: "nochange", renderSignature, sourceMetadataSignature, sourceFingerprint: cachedFastPath.sourceFingerprint, targetMetadataSignature } + } + + if (piSkillSourceFingerprintHook) { + await piSkillSourceFingerprintHook(sourceDir) + } + + await piSkillSourceAnalysisHook?.(sourceDir) + const sourceAnalysis = await analyzePiMaterializedTree(sourceMetadataSummary.root, targetName, nameMaps, transformOptions) + + if (piSkillFullCompareHook) { + await 
piSkillFullCompareHook(targetDir) + } + + const comparison = await planPiIncrementalOps(sourceAnalysis.tree, targetAnalysis.tree, targetDir) + + if (comparison.result === "nochange") { + return { + result: "nochange", + renderSignature, + sourceMetadataSignature, + sourceFingerprint: sourceAnalysis.fingerprint, + targetMetadataSignature, + } + } + + if (comparison.result === "fallback") { + return { result: "fallback" } + } + + return { + result: "apply", + ops: comparison.ops, + renderSignature, + sourceMetadataSignature: sourceAnalysis.metadataSignature, + sourceFingerprint: sourceAnalysis.fingerprint, + } +} + +function buildPiSkillRenderSignature( + targetName: string, + nameMaps?: PiNameMaps, + transformOptions?: PiTransformOptions, +): string { + return createHash("sha256").update(JSON.stringify(canonicalizeJsonValue({ + targetName, + nameMaps: nameMaps ?? null, + transformOptions: transformOptions ?? null, + }))).digest("hex") +} + +function canonicalizeJsonValue(value: unknown): unknown { + if (Array.isArray(value)) { + return value.map(canonicalizeJsonValue) + } + + if (value && typeof value === "object") { + return Object.fromEntries( + Object.entries(value as Record) + .sort(([left], [right]) => left.localeCompare(right)) + .map(([key, entryValue]) => [key, canonicalizeJsonValue(entryValue)]), + ) + } + + return value +} + +async function analyzePiMaterializedTree( + summary: PiMaterializedMetadataNode, + targetName: string, + nameMaps?: PiNameMaps, + transformOptions?: PiTransformOptions, +): Promise { + if (summary.kind !== "directory") { + throw new Error(`Expected Pi materialized directory summary for ${summary.sourcePath}`) + } + + const node: PiMaterializedTreeNode = { kind: "directory", children: new Map() } + const fingerprintHash = createHash("sha256") + + for (const child of summary.children ?? 
[]) { + fingerprintHash.update(child.kind) + fingerprintHash.update("") + fingerprintHash.update(child.name) + fingerprintHash.update("") + + if (child.kind === "directory") { + const childAnalysis = await analyzePiMaterializedTree(child, targetName, nameMaps, transformOptions) + node.children.set(child.name, childAnalysis.tree) + fingerprintHash.update(childAnalysis.fingerprint) + continue + } + + if (child.name === "SKILL.md") { + const raw = await readText(child.sourcePath) + const renderedContent = renderPiSkillContent(raw, targetName, nameMaps, child.sourcePath, transformOptions) + node.children.set(child.name, { + kind: "file", + sourcePath: child.sourcePath, + renderedContent, + }) + fingerprintHash.update(renderedContent) + continue + } + + node.children.set(child.name, { + kind: "file", + sourcePath: child.sourcePath, + }) + fingerprintHash.update(await fs.readFile(child.sourcePath)) + } + + return { + tree: node, + fingerprint: fingerprintHash.digest("hex"), + metadataSignature: summary.metadataSignature ?? 
"", + } +} + +async function buildPiMaterializedTreeMetadataSummary( + sourceDir: string, + options?: PiMaterializationOptions, + activeRealDirs = new Set(), +): Promise { + const realSourceDir = await fs.realpath(sourceDir) + if (activeRealDirs.has(realSourceDir)) { + throw cyclicPiSkillSymlinkError(sourceDir) + } + + activeRealDirs.add(realSourceDir) + + try { + const hash = createHash("sha256") + const root: PiMaterializedMetadataNode = { + kind: "directory", + name: path.basename(sourceDir), + sourcePath: sourceDir, + children: [], + } + const entries = await fs.readdir(sourceDir, { withFileTypes: true }) + + for (const entry of entries.sort((left, right) => left.name.localeCompare(right.name))) { + const sourcePath = path.join(sourceDir, entry.name) + const resolvedEntry = await resolvePiMaterializedEntry(sourcePath, entry, undefined, options) + if (!resolvedEntry || resolvedEntry.kind === "skip") continue + + const stats = await fs.lstat(resolvedEntry.sourcePath) + hash.update(resolvedEntry.kind) + hash.update("") + hash.update(resolvedEntry.name) + hash.update("") + hash.update(String(stats.size)) + hash.update(":") + hash.update(String(stats.mtimeMs)) + hash.update("") + + if (resolvedEntry.kind === "directory") { + const childSummary = await buildPiMaterializedTreeMetadataSummary(resolvedEntry.sourcePath, options, activeRealDirs) + root.children!.push({ + kind: "directory", + name: resolvedEntry.name, + sourcePath: resolvedEntry.sourcePath, + metadataSignature: childSummary.metadataSignature, + children: childSummary.root.children, + }) + hash.update(childSummary.metadataSignature) + continue + } + + root.children!.push({ + kind: "file", + name: resolvedEntry.name, + sourcePath: resolvedEntry.sourcePath, + }) + } + + const metadataSignature = hash.digest("hex") + root.metadataSignature = metadataSignature + return { metadataSignature, root } + } finally { + activeRealDirs.delete(realSourceDir) + } +} + +async function buildPiTargetTree(targetDir: string): 
/** Convenience wrapper: metadata signature of a target tree, tree discarded. */
async function buildPiTargetMetadataSignature(targetDir: string): Promise<string> {
  return (await buildPiTargetTree(targetDir)).metadataSignature
}

/**
 * Recursively records what exists under the target directory (directories,
 * files, symlinks, anything else) while folding each entry's kind, relative
 * path, size and mtime into the shared hash. The resulting signature mirrors
 * the source-side metadata signature and backs the fast-path comparison.
 */
async function buildPiTargetTreeNode(targetDir: string, hash: ReturnType<typeof createHash>, relativeDir: string): Promise<PiTargetTreeNode> {
  const entries = await fs.readdir(targetDir, { withFileTypes: true })
  const children = new Map<string, PiTargetTreeNode>()

  // Sorted traversal keeps the signature independent of readdir order.
  // NOTE(review): localeCompare is locale/ICU-sensitive — see source-side note.
  for (const entry of entries.sort((left, right) => left.name.localeCompare(right.name))) {
    const targetPath = path.join(targetDir, entry.name)
    const relativePath = relativeDir ? path.join(relativeDir, entry.name) : entry.name
    // lstat (not stat) so symlinks are observed as symlinks, never followed.
    const stats = await fs.lstat(targetPath)
    const kind = entry.isDirectory() ? "directory" : entry.isFile() ? "file" : entry.isSymbolicLink() ? "symlink" : "other"
    // NOTE(review): the empty-string hash.update("") calls are no-op
    // separators (likely delimiters lost from the original) — adjacent
    // fields concatenate ambiguously in the digest. Confirm and restore.
    hash.update(kind)
    hash.update("")
    hash.update(relativePath)
    hash.update("")
    hash.update(String(stats.size))
    hash.update(":")
    hash.update(String(stats.mtimeMs))
    hash.update("")

    if (entry.isDirectory()) {
      children.set(entry.name, await buildPiTargetTreeNode(targetPath, hash, relativePath))
      continue
    }

    if (entry.isFile()) {
      children.set(entry.name, { kind: "file" })
      continue
    }

    if (entry.isSymbolicLink()) {
      children.set(entry.name, { kind: "symlink" })
      continue
    }

    children.set(entry.name, { kind: "other" })
  }

  return { kind: "directory", children }
}

/**
 * Diffs the rendered source tree against the target tree and classifies the
 * outcome: "nochange" (already identical), "apply" (the returned incremental
 * ops suffice), or "fallback" (shape mismatch — caller should replace the
 * whole directory instead).
 */
async function planPiIncrementalOps(
  sourceTree: PiMaterializedTreeNode,
  targetTree: PiTargetTreeNode,
  targetDir: string,
): Promise<
  | { result: "nochange" }
  | { result: "fallback" }
  | { result: "apply"; ops: PiIncrementalOp[] }
> {
  const ops: PiIncrementalOp[] = []
  // comparePiDirectoryNodes appends required ops in-place as it walks.
  const comparison = await comparePiDirectoryNodes(sourceTree, targetTree, targetDir, "", ops)

  if (comparison === "fallback") {
    return { result: "fallback" }
  }

  if (ops.length === 0) {
    return { result: "nochange" }
  }

  return { result: "apply", ops }
}
sourceChild.renderedContent, + }) + } + } + + return "ok" +} + +function appendPiCreateOps(node: PiMaterializedTreeNode, relativePath: string, ops: PiIncrementalOp[]): void { + if (node.kind === "directory") { + ops.push({ type: "createDir", relativePath }) + for (const [name, child] of [...node.children.entries()].sort(([left], [right]) => left.localeCompare(right))) { + appendPiCreateOps(child, path.join(relativePath, name), ops) + } + return + } + + ops.push({ + type: "writeFile", + relativePath, + sourcePath: node.sourcePath, + renderedContent: node.renderedContent, + }) +} + +async function materializedFileNodeMatchesTarget(node: Extract, targetPath: string): Promise { + if (node.renderedContent !== undefined) { + const targetRaw = await readText(targetPath) + return node.renderedContent === targetRaw + } + + return fileContentsMatch(node.sourcePath, targetPath) +} + +async function readPiSkillFastPathRecord(targetDir: string): Promise { + const recordPath = resolvePiSkillFastPathRecordPath(targetDir) + try { + const parsed = JSON.parse(await readText(recordPath)) as PiSkillFastPathRecord + if (parsed.version !== 3) return null + if (!parsed.policyFingerprint || !parsed.renderSignature || !parsed.sourceMetadataSignature || !parsed.sourceFingerprint || !parsed.targetMetadataSignature) return null + return parsed + } catch { + return null + } +} + +async function writePiSkillFastPathRecord( + targetDir: string, + renderSignature: string, + sourceMetadataSignature: string, + sourceFingerprint: string, + targetMetadataSignature: string, +): Promise { + const recordPath = resolvePiSkillFastPathRecordPath(targetDir) + const record: PiSkillFastPathRecord = { + version: 3, + policyFingerprint: getPiPolicyFingerprint(), + renderSignature, + sourceMetadataSignature, + sourceFingerprint, + targetMetadataSignature, + } + + await fs.mkdir(path.dirname(recordPath), { recursive: true }) + await writeTextAtomicIfChanged({ + filePath: recordPath, + content: 
/**
 * Maps a target directory to its cached fast-path record file. The record
 * lives under the (env-overridable) state home, keyed by a sha256 of the
 * resolved target path so arbitrary directory names stay filesystem-safe.
 */
function resolvePiSkillFastPathRecordPath(targetDir: string): string {
  const stateHome = process.env.COMPOUND_ENGINEERING_HOME || os.homedir()
  const identity = createHash("sha256").update(path.resolve(targetDir)).digest("hex")
  return path.join(stateHome, ".compound-engineering", "pi-skill-fingerprints", `${identity}.json`)
}

/**
 * Applies incremental ops with rollback: every target path is snapshotted to
 * a temp dir before its first mutation; on any failure all snapshots are
 * restored and the error is rethrown. The snapshot dir is always cleaned up.
 */
async function applyPiIncrementalOps(targetDir: string, ops: PiIncrementalOp[]): Promise<void> {
  const snapshotRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-update-"))
  const snapshots = new Map<string, PiMutationSnapshot>()

  try {
    for (const op of ops) {
      const targetPath = path.join(targetDir, op.relativePath)
      // Snapshot before touching the path so a later failure can undo this op.
      await capturePiMutationSnapshotIfNeeded(targetPath, snapshotRoot, snapshots)
      await applyPiIncrementalOp(targetDir, op)
    }
  } catch (error) {
    await restorePiMutationSnapshots(snapshots)
    throw error
  } finally {
    // Best-effort cleanup; a failure here must not mask the real outcome.
    await fs.rm(snapshotRoot, { recursive: true, force: true }).catch(() => undefined)
  }
}

/**
 * Executes one planned mutation. Every branch re-validates the target path
 * (no symlink anywhere in the chain) immediately before writing.
 */
async function applyPiIncrementalOp(targetDir: string, op: PiIncrementalOp): Promise<void> {
  const targetPath = path.join(targetDir, op.relativePath)

  if (op.type === "createDir") {
    // "missing" also tolerates an already-existing real directory.
    await ensureSafePiMutationTarget(targetPath, "missing")
    await ensureManagedDir(targetPath)
    await assertNoSymlinkAncestors(targetPath)
    return
  }

  if (op.type === "remove") {
    await removePiMaterializedPath(targetPath, op.targetKind)
    return
  }

  // op.type === "writeFile"
  await ensureManagedParentDir(targetPath)
  await ensureSafePiMutationTarget(targetPath, "file")

  if (op.renderedContent !== undefined) {
    // Pre-rendered (SKILL.md) content wins over copying the source bytes.
    await writeTextAtomicIfChanged({ filePath: targetPath, content: op.renderedContent })
    return
  }

  if (!op.sourcePath) {
    throw new Error(`Missing Pi materialized source for ${targetPath}`)
  }

  const sourceBuffer = await fs.readFile(op.sourcePath)
  await writeFileAtomicIfChanged({ filePath: targetPath, content: sourceBuffer })
}
/**
 * Removes a previously-materialized path, but only when it is exactly the
 * expected kind. Symlinks and kind mismatches abort with an error rather
 * than deleting anything. The lstat is repeated after the ancestor check to
 * narrow the TOCTOU window between validation and the destructive call.
 */
async function removePiMaterializedPath(targetPath: string, expectedKind: "directory" | "file"): Promise<void> {
  await assertNoSymlinkAncestors(targetPath)
  const stats = await fs.lstat(targetPath).catch(() => null)
  // Already gone — nothing to do.
  if (!stats) return
  if (stats.isSymbolicLink()) {
    throw new Error(`Refusing to remove unsafe Pi skill path ${targetPath}`)
  }

  if (expectedKind === "directory") {
    if (!stats.isDirectory()) {
      throw new Error(`Refusing to remove unexpected Pi skill path ${targetPath}`)
    }
    // Re-validate right before the recursive rm.
    await assertNoSymlinkAncestors(targetPath)
    const rechecked = await fs.lstat(targetPath)
    if (!rechecked.isDirectory() || rechecked.isSymbolicLink()) {
      throw new Error(`Refusing to remove unexpected Pi skill path ${targetPath}`)
    }
    await fs.rm(targetPath, { recursive: true, force: true })
    return
  }

  if (!stats.isFile()) {
    throw new Error(`Refusing to remove unexpected Pi skill path ${targetPath}`)
  }
  // Re-validate right before the unlink.
  await assertNoSymlinkAncestors(targetPath)
  const rechecked = await fs.lstat(targetPath)
  if (!rechecked.isFile() || rechecked.isSymbolicLink()) {
    throw new Error(`Refusing to remove unexpected Pi skill path ${targetPath}`)
  }
  await fs.unlink(targetPath)
}
/**
 * Rolls the target tree back to its pre-mutation state from captured
 * snapshots. Snapshots are processed longest-path (deepest) first. Paths
 * that did not exist before the mutation are removed; files and directories
 * are copied back from the snapshot temp dir and get their original
 * permission bits reapplied.
 */
async function restorePiMutationSnapshots(snapshots: Map<string, PiMutationSnapshot>): Promise<void> {
  // Descending path length puts children ahead of their ancestors.
  const ordered = [...snapshots.values()].sort((left, right) => right.targetPath.length - left.targetPath.length)

  for (const snapshot of ordered) {
    if (!snapshot.existed) {
      // The mutation created this path — undoing means deleting it.
      await removePiMutationTargetIfPresent(snapshot.targetPath)
      continue
    }

    if (snapshot.kind === "directory") {
      // Clear whatever is there now, then copy the saved tree back.
      await removePiMutationTargetIfPresent(snapshot.targetPath)
      await assertNoSymlinkAncestors(snapshot.targetPath)
      await copyPiSnapshotDirectory(snapshot.tempPath!, snapshot.targetPath)
      if (snapshot.mode !== undefined) {
        await fs.chmod(snapshot.targetPath, snapshot.mode)
      }
      continue
    }

    // File snapshot: replace current content with the saved copy.
    await removePiMutationTargetIfPresent(snapshot.targetPath)
    await assertNoSymlinkAncestors(snapshot.targetPath)
    await ensureManagedParentDir(snapshot.targetPath)
    await fs.copyFile(snapshot.tempPath!, snapshot.targetPath)
    if (snapshot.mode !== undefined) {
      await fs.chmod(snapshot.targetPath, snapshot.mode)
    }
  }
}
fs.lstat(targetPath).catch(() => null) + if (!stats) return + if (stats.isSymbolicLink()) { + throw new Error(`Refusing to restore through symlink target ${targetPath}`) + } + if (stats.isDirectory()) { + await assertNoSymlinkAncestors(targetPath) + const rechecked = await fs.lstat(targetPath) + if (!rechecked.isDirectory() || rechecked.isSymbolicLink()) { + throw new Error(`Refusing to restore unexpected Pi skill path ${targetPath}`) + } + await fs.rm(targetPath, { recursive: true, force: true }) + return + } + if (stats.isFile()) { + await assertNoSymlinkAncestors(targetPath) + const rechecked = await fs.lstat(targetPath) + if (!rechecked.isFile() || rechecked.isSymbolicLink()) { + throw new Error(`Refusing to restore unexpected Pi skill path ${targetPath}`) + } + await fs.unlink(targetPath) + return + } + throw new Error(`Refusing to restore unexpected Pi skill path ${targetPath}`) +} + +async function copyPiSnapshotDirectory(sourceDir: string, targetDir: string): Promise { + await assertNoSymlinkAncestors(targetDir) + await fs.mkdir(targetDir, { recursive: true }) + const entries = await fs.readdir(sourceDir, { withFileTypes: true }) + + for (const entry of entries) { + const sourcePath = path.join(sourceDir, entry.name) + const targetPath = path.join(targetDir, entry.name) + + if (entry.isDirectory()) { + await copyPiSnapshotDirectory(sourcePath, targetPath) + continue + } + + if (entry.isFile()) { + await assertNoSymlinkAncestors(targetPath) + await fs.mkdir(path.dirname(targetPath), { recursive: true }) + await fs.copyFile(sourcePath, targetPath) + continue + } + + throw new Error(`Refusing to snapshot unexpected Pi skill path ${sourcePath}`) + } +} + +function transformPiMarkdownLine(line: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { + const literals: string[] = [] + const protectedLine = line.replace(/(`+)([^`]*?)\1/g, (match) => { + const index = literals.push(match) - 1 + return `@@PI_LITERAL_${index}@@` + }) + + const 
taskPattern = /^(\s*(?:(?:[-*])\s+|\d+\.\s+)?)Task\s+([a-z][a-z0-9:_-]*)\(([^)]*)\)/ + let result = protectedLine.replace(taskPattern, (_match, prefix: string, agentName: string, args: string) => { + const normalizedAgent = normalizePiTaskAgentName(agentName, nameMaps, options) + if (normalizedAgent === agentName && normalizedAgent.includes(":") && options?.preserveUnknownQualifiedRefs) { + if (options.rejectUnknownQualifiedTaskRefs) { + throw new Error(`Unsupported foreign qualified Task ref for Pi sync: ${agentName}`) + } + return _match + } + const trimmedArgs = args.trim().replace(/\s+/g, " ").replace(/^["']|["']$/g, "") + return trimmedArgs + ? `${prefix}Run ${PI_CE_SUBAGENT_TOOL} with agent="${normalizedAgent}" and task="${trimmedArgs}".` + : `${prefix}Run ${PI_CE_SUBAGENT_TOOL} with agent="${normalizedAgent}".` + }) + + result = result.replace(/\bRun (?:subagent|ce_subagent) with agent="([^"]+)"/g, (_match, agentName: string) => { + const normalizedAgent = normalizePiTaskAgentName(agentName, nameMaps, options) + return `Run ${PI_CE_SUBAGENT_TOOL} with agent="${normalizedAgent}"` + }) + result = result.replace(/\bAskUserQuestion\b/g, "ask_user_question") + result = result.replace(/\bTodoWrite\b/g, "file-based todos (todos/ + /skill:todo-create)") + result = result.replace(/\bTodoRead\b/g, "file-based todos (todos/ + /skill:todo-create)") + + const slashCommandPattern = /(?<![\w/])\/([a-z][a-z0-9:_\/-]*)/g + result = result.replace(slashCommandPattern, (match: string, commandName: string) =>
{ + if (commandName.includes("/")) return match + if (["dev", "tmp", "etc", "usr", "var", "bin", "home"].includes(commandName)) { + return match + } + + if (commandName.startsWith("skill:")) { + const skillName = commandName.slice("skill:".length) + return `/skill:${normalizePiSkillReferenceName(skillName, nameMaps, options)}` + } + + if (commandName.startsWith("prompts:")) { + const promptName = commandName.slice("prompts:".length) + const normalizedPrompt = normalizePiPromptReferenceName(promptName, nameMaps, options) + if (normalizedPrompt === promptName && normalizedPrompt.includes(":")) { + return match + } + return `/${normalizedPrompt}` + } + + const withoutPrefix = commandName.startsWith("prompts:") + ? commandName.slice("prompts:".length) + : commandName + + return `/${nameMaps?.prompts?.[withoutPrefix] ?? normalizePiSkillName(withoutPrefix)}` + }) + + return result.replace(/@@PI_LITERAL_(\d+)@@/g, (_match, index: string) => literals[Number(index)] ?? _match) +} + +function readMarkdownFence(line: string): { char: "`" | "~"; length: number } | null { + const trimmed = line.trimStart() + const match = trimmed.match(/^(`{3,}|~{3,})/) + if (!match) return null + return { + char: match[1][0] as "`" | "~", + length: match[1].length, + } +} + +function isIndentedCodeBlockLine(line: string): boolean { + return /^(?: {4}|\t)/.test(line) +} + +function isIndentedTaskBulletLine(line: string): boolean { + return /^\s+(?:[-*]\s+|\d+\.\s+)Task\s+[a-z][a-z0-9:_-]*\(/.test(line) +} + +function isMarkdownBlockStarter(line: string): boolean { + const trimmed = line.trimStart() + if (trimmed.length === 0) return false + return /^(?:[-*+]\s+|\d+\.\s+|#{1,6}\s|```|~~~|>)/.test(trimmed) +} + +async function resolvePiMaterializedEntry( + sourcePath: string, + entry: Dirent, + options?: { logSkippedDanglingSymlinks?: boolean }, + materialization?: PiMaterializationOptions, +): Promise<PiMaterializedEntry | null> { + if (entry.isDirectory()) { + return { kind: "directory", name: entry.name, sourcePath } + }
+ + if (entry.isFile()) { + return { kind: "file", name: entry.name, sourcePath } + } + + if (!entry.isSymbolicLink()) { + return null + } + + try { + const [stats, resolvedPath] = await Promise.all([ + fs.stat(sourcePath), + fs.realpath(sourcePath), + ]) + + if (materialization?.trustedRoot) { + const trustedRoot = path.resolve(materialization.trustedRoot) + const withinTrustedRoot = resolvedPath === trustedRoot || resolvedPath.startsWith(trustedRoot + path.sep) + if (!withinTrustedRoot) { + console.warn(`Pi sync: skipping symlink outside trusted root ${sourcePath} -> ${resolvedPath}`) + return { kind: "skip" } + } + } + + if (stats.isDirectory()) { + return { kind: "directory", name: entry.name, sourcePath: resolvedPath } + } + + if (stats.isFile()) { + return { kind: "file", name: entry.name, sourcePath: resolvedPath } + } + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") { + if (options?.logSkippedDanglingSymlinks) { + console.warn(`Pi sync: skipping dangling symlink ${sourcePath}`) + } + return { kind: "skip" } + } + throw error + } + + return null +} diff --git a/src/utils/pi-trust-contract.ts b/src/utils/pi-trust-contract.ts new file mode 100644 index 000000000..bcea44881 --- /dev/null +++ b/src/utils/pi-trust-contract.ts @@ -0,0 +1,33 @@ +export type PiSharedResourceContract = { + state: "active" | "preserved-untrusted" | "absent" + retain: boolean + advertise: boolean +} + +export function derivePiSharedResourceContract(options: { + nextOwns?: boolean + otherVerifiedOwner?: boolean + preserveUntrusted?: boolean +}): PiSharedResourceContract { + if (options.nextOwns || options.otherVerifiedOwner) { + return { + state: "active", + retain: true, + advertise: true, + } + } + + if (options.preserveUntrusted) { + return { + state: "preserved-untrusted", + retain: false, + advertise: false, + } + } + + return { + state: "absent", + retain: false, + advertise: false, + } +} diff --git a/tests/claude-home.test.ts 
b/tests/claude-home.test.ts index 23937d1a0..e1d210fef 100644 --- a/tests/claude-home.test.ts +++ b/tests/claude-home.test.ts @@ -79,4 +79,25 @@ describe("loadClaudeHome", () => { expect(config.skills[0]?.description).toBeUndefined() expect(config.skills[0]?.argumentHint).toBeUndefined() }) + + test("records personal skill entry dir, lexical trusted root, and canonical trusted boundary separately", async () => { + const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "claude-home-symlink-root-")) + const actualSkillsRoot = path.join(tempHome, "actual-skills") + const linkedSkillsRoot = path.join(tempHome, "skills") + const externalSkillDir = path.join(tempHome, "external-skill") + + await fs.mkdir(actualSkillsRoot, { recursive: true }) + await fs.mkdir(externalSkillDir, { recursive: true }) + await fs.writeFile(path.join(externalSkillDir, "SKILL.md"), "---\nname: reviewer\n---\nReview things.\n") + await fs.symlink(actualSkillsRoot, linkedSkillsRoot) + await fs.symlink(externalSkillDir, path.join(linkedSkillsRoot, "reviewer")) + + const config = await loadClaudeHome(tempHome) + + expect(config.skills).toHaveLength(1) + expect(config.skills[0]?.entryDir).toBe(path.join(linkedSkillsRoot, "reviewer")) + expect(config.skills[0]?.trustedRoot).toBe(linkedSkillsRoot) + expect(config.skills[0]?.trustedBoundary).toBe(externalSkillDir) + expect(config.skills[0]?.sourceDir).toBe(externalSkillDir) + }) }) diff --git a/tests/files.test.ts b/tests/files.test.ts new file mode 100644 index 000000000..526b4bad8 --- /dev/null +++ b/tests/files.test.ts @@ -0,0 +1,111 @@ +import { describe, expect, test } from "bun:test" +import { promises as fs } from "fs" +import os from "os" +import path from "path" +import { + assertSafePathComponent, + backupFile, + captureManagedPathSnapshot, + removeFileIfExists, + removeManagedPathIfExists, + restoreManagedPathSnapshot, + writeFileAtomicIfChanged, +} from "../src/utils/files" + +describe("managed file mutations", () => { + test("rejects 
unsafe path components before path joins", () => { + expect(() => assertSafePathComponent("prompt-one", "prompt name")).not.toThrow() + expect(() => assertSafePathComponent("../escape", "prompt name")).toThrow("Unsafe prompt name") + expect(() => assertSafePathComponent("nested/path", "prompt name")).toThrow("Unsafe prompt name") + }) + + test("rejects binary writes through symlinked ancestor directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-write-ancestor-symlink-")) + const externalRoot = path.join(tempRoot, "external") + const managedRoot = path.join(tempRoot, "managed") + const symlinkedDir = path.join(managedRoot, "compound-engineering") + const targetPath = path.join(symlinkedDir, "mcporter.json") + + await fs.mkdir(externalRoot, { recursive: true }) + await fs.mkdir(managedRoot, { recursive: true }) + await fs.symlink(externalRoot, symlinkedDir) + + await expect(writeFileAtomicIfChanged({ + filePath: targetPath, + content: Buffer.from("hello\n"), + })).rejects.toThrow("symlinked ancestor") + + await expect(fs.access(path.join(externalRoot, "mcporter.json"))).rejects.toBeDefined() + }) + + test("rejects managed deletes through symlinked ancestor directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-delete-ancestor-symlink-")) + const externalRoot = path.join(tempRoot, "external") + const managedRoot = path.join(tempRoot, "managed") + const symlinkedDir = path.join(managedRoot, "compound-engineering") + const targetPath = path.join(symlinkedDir, "mcporter.json") + + await fs.mkdir(externalRoot, { recursive: true }) + await fs.mkdir(managedRoot, { recursive: true }) + await fs.writeFile(path.join(externalRoot, "mcporter.json"), "external\n") + await fs.symlink(externalRoot, symlinkedDir) + + await expect(removeManagedPathIfExists(targetPath)).rejects.toThrow("symlinked ancestor") + + expect(await fs.readFile(path.join(externalRoot, "mcporter.json"), "utf8")).toBe("external\n") + 
}) + + test("rejects plain file deletes through symlinked ancestor directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-plain-delete-ancestor-symlink-")) + const externalRoot = path.join(tempRoot, "external") + const managedRoot = path.join(tempRoot, "managed") + const symlinkedDir = path.join(managedRoot, "compound-engineering") + const targetPath = path.join(symlinkedDir, "mcporter.json") + + await fs.mkdir(externalRoot, { recursive: true }) + await fs.mkdir(managedRoot, { recursive: true }) + await fs.writeFile(path.join(externalRoot, "mcporter.json"), "external\n") + await fs.symlink(externalRoot, symlinkedDir) + + await expect(removeFileIfExists(targetPath)).rejects.toThrow("symlinked ancestor") + + expect(await fs.readFile(path.join(externalRoot, "mcporter.json"), "utf8")).toBe("external\n") + }) + + test("preserves source permissions when creating backups", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-backup-perms-")) + const sourcePath = path.join(tempRoot, "mcporter.json") + + await fs.writeFile(sourcePath, "{}\n", { mode: 0o600 }) + await fs.chmod(sourcePath, 0o600) + + const backupPath = await backupFile(sourcePath) + expect(backupPath).toBeDefined() + + const stats = await fs.stat(backupPath!) 
+ expect(stats.mode & 0o777).toBe(0o600) + }) + + test("rejects snapshot restore through symlinked ancestor directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-restore-ancestor-symlink-")) + const managedRoot = path.join(tempRoot, "managed") + const safeParent = path.join(managedRoot, "prompts") + const targetPath = path.join(safeParent, "plan-review.md") + const snapshotRoot = path.join(tempRoot, "snapshots") + const externalRoot = path.join(tempRoot, "external") + + await fs.mkdir(safeParent, { recursive: true }) + await fs.mkdir(snapshotRoot, { recursive: true }) + await fs.mkdir(externalRoot, { recursive: true }) + await fs.writeFile(targetPath, "original\n") + + const snapshot = await captureManagedPathSnapshot(targetPath, snapshotRoot) + + await fs.rename(safeParent, `${safeParent}-bak`) + await fs.symlink(externalRoot, safeParent) + + await expect(restoreManagedPathSnapshot(snapshot)).rejects.toThrow("symlinked ancestor") + + await expect(fs.access(path.join(externalRoot, "plan-review.md"))).rejects.toBeDefined() + }) +}) diff --git a/tests/path-sanitization.test.ts b/tests/path-sanitization.test.ts index 4fb8e57a0..aa1aca455 100644 --- a/tests/path-sanitization.test.ts +++ b/tests/path-sanitization.test.ts @@ -1,7 +1,7 @@ import { describe, expect, test } from "bun:test" import path from "path" import { loadClaudePlugin } from "../src/parsers/claude" -import { sanitizePathName } from "../src/utils/files" +import { assertSafePathComponent, sanitizePathName } from "../src/utils/files" const pluginRoot = path.join(process.cwd(), "plugins", "compound-engineering") @@ -18,6 +18,11 @@ describe("sanitizePathName", () => { test("handles multiple colons", () => { expect(sanitizePathName("a:b:c")).toBe("a-b-c") }) + + test("still rejects traversal and separators after sanitization", () => { + expect(() => assertSafePathComponent(sanitizePathName("../escape"), "artifact name")).toThrow("Unsafe artifact name") + expect(() => 
assertSafePathComponent(sanitizePathName("nested/path"), "artifact name")).toThrow("Unsafe artifact name") + }) }) describe("path sanitization collision detection", () => { diff --git a/tests/pi-converter.test.ts b/tests/pi-converter.test.ts index b68b6049f..b089512f2 100644 --- a/tests/pi-converter.test.ts +++ b/tests/pi-converter.test.ts @@ -3,6 +3,7 @@ import path from "path" import { loadClaudePlugin } from "../src/parsers/claude" import { convertClaudeToPi } from "../src/converters/claude-to-pi" import { parseFrontmatter } from "../src/utils/frontmatter" +import { appendCompatibilityNoteIfNeeded, transformPiBodyContent } from "../src/utils/pi-skills" import type { ClaudePlugin } from "../src/types/claude" const fixtureRoot = path.join(import.meta.dir, "fixtures", "sample-plugin") @@ -18,7 +19,7 @@ describe("convertClaudeToPi", () => { // Prompts are normalized command names expect(bundle.prompts.some((prompt) => prompt.name === "workflows-review")).toBe(true) - expect(bundle.prompts.some((prompt) => prompt.name === "plan_review")).toBe(true) + expect(bundle.prompts.some((prompt) => prompt.name === "plan-review")).toBe(true) // Commands with disable-model-invocation are excluded expect(bundle.prompts.some((prompt) => prompt.name === "deploy-docs")).toBe(false) @@ -32,10 +33,10 @@ describe("convertClaudeToPi", () => { expect(bundle.skillDirs.some((skill) => skill.name === "skill-one")).toBe(true) expect(bundle.generatedSkills.some((skill) => skill.name === "repo-research-analyst")).toBe(true) - // Pi compatibility extension is included (with subagent + MCPorter tools) + // Pi compatibility extension is included (with ce_subagent + MCPorter tools) const compatExtension = bundle.extensions.find((extension) => extension.name === "compound-engineering-compat.ts") expect(compatExtension).toBeDefined() - expect(compatExtension!.content).toContain('name: "subagent"') + expect(compatExtension!.content).toContain('name: "ce_subagent"') 
expect(compatExtension!.content).toContain('name: "mcporter_call"') // Claude MCP config is translated to MCPorter config @@ -54,8 +55,8 @@ describe("convertClaudeToPi", () => { description: "Plan workflow", body: [ "Run these in order:", - "- Task repo-research-analyst(feature_description)", - "- Task learnings-researcher(feature_description)", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + "- Task compound-engineering:research:learnings-researcher(feature_description)", "Use AskUserQuestion tool for follow-up.", "Then use /workflows:work and /prompts:todo-resolve.", "Track progress with TodoWrite and TodoRead.", @@ -77,8 +78,8 @@ describe("convertClaudeToPi", () => { expect(bundle.prompts).toHaveLength(1) const parsedPrompt = parseFrontmatter(bundle.prompts[0].content) - expect(parsedPrompt.body).toContain("Run subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") - expect(parsedPrompt.body).toContain("Run subagent with agent=\"learnings-researcher\" and task=\"feature_description\".") + expect(parsedPrompt.body).toContain("Run ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + expect(parsedPrompt.body).toContain("Run ce_subagent with agent=\"learnings-researcher\" and task=\"feature_description\".") expect(parsedPrompt.body).toContain("ask_user_question") expect(parsedPrompt.body).toContain("/workflows-work") expect(parsedPrompt.body).toContain("/todo-resolve") @@ -114,8 +115,8 @@ describe("convertClaudeToPi", () => { }) const parsedPrompt = parseFrontmatter(bundle.prompts[0].content) - expect(parsedPrompt.body).toContain('Run subagent with agent="repo-research-analyst" and task="feature_description".') - expect(parsedPrompt.body).toContain('Run subagent with agent="security-reviewer" and task="code_diff".') + expect(parsedPrompt.body).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + 
expect(parsedPrompt.body).toContain('Run ce_subagent with agent="security-reviewer" and task="code_diff".') expect(parsedPrompt.body).not.toContain("compound-engineering:") }) @@ -144,11 +145,135 @@ describe("convertClaudeToPi", () => { }) const parsedPrompt = parseFrontmatter(bundle.prompts[0].content) - expect(parsedPrompt.body).toContain('Run subagent with agent="code-simplicity-reviewer".') + expect(parsedPrompt.body).toContain('Run ce_subagent with agent="code-simplicity-reviewer".') + expect(parsedPrompt.body).not.toContain('task=""') expect(parsedPrompt.body).not.toContain("compound-engineering:") expect(parsedPrompt.body).not.toContain("()") }) + test("normalizes copied skill names to Pi-safe names and avoids collisions", () => { + const plugin: ClaudePlugin = { + root: "/tmp/plugin", + manifest: { name: "fixture", version: "1.0.0" }, + agents: [ + { + name: "ce:plan", + description: "Plan helper", + body: "Agent body", + sourcePath: "/tmp/plugin/agents/ce:plan.md", + }, + ], + commands: [], + skills: [ + { + name: "ce:plan", + sourceDir: "/tmp/plugin/skills/ce:plan", + skillPath: "/tmp/plugin/skills/ce:plan/SKILL.md", + }, + { + name: "generate_command", + sourceDir: "/tmp/plugin/skills/generate_command", + skillPath: "/tmp/plugin/skills/generate_command/SKILL.md", + }, + ], + hooks: undefined, + mcpServers: undefined, + } + + const bundle = convertClaudeToPi(plugin, { + agentMode: "subagent", + inferTemperature: false, + permissions: "none", + }) + + expect(bundle.skillDirs.map((skill) => skill.name)).toEqual(["ce-plan", "generate-command"]) + expect(bundle.generatedSkills[0]?.name).toBe("ce-plan-2") + }) + + test("resolves Task calls to deduped agent names when names collide", () => { + const plugin: ClaudePlugin = { + root: "/tmp/plugin", + manifest: { name: "fixture", version: "1.0.0" }, + agents: [ + { + name: "code-review", + description: "Review 1", + body: "Agent body 1", + sourcePath: "/tmp/plugin/agents/code-review.md", + }, + { + name: 
"code_review", + description: "Review 2", + body: "Agent body 2", + sourcePath: "/tmp/plugin/agents/code_review.md", + }, + ], + commands: [ + { + name: "review", + description: "Run review", + body: "- Task code-review(feature)\n- Task code_review(feature)", + sourcePath: "/tmp/plugin/commands/review.md", + }, + ], + skills: [], + hooks: undefined, + mcpServers: undefined, + } + + const bundle = convertClaudeToPi(plugin, { + agentMode: "subagent", + inferTemperature: false, + permissions: "none", + }) + + expect(bundle.generatedSkills.map((s) => s.name)).toEqual(["code-review", "code-review-2"]) + + const parsedPrompt = parseFrontmatter(bundle.prompts[0].content) + expect(parsedPrompt.body).toContain('agent="code-review" and task="feature"') + expect(parsedPrompt.body).toContain('agent="code-review-2" and task="feature"') + }) + + test("resolves slash refs to deduped prompt names when command names collide", () => { + const plugin: ClaudePlugin = { + root: "/tmp/plugin", + manifest: { name: "fixture", version: "1.0.0" }, + agents: [], + commands: [ + { + name: "plan-review", + description: "First review", + body: "Run /plan_review after this.", + sourcePath: "/tmp/plugin/commands/plan-review.md", + }, + { + name: "plan_review", + description: "Second review", + body: "Then run /plan-review to continue.", + sourcePath: "/tmp/plugin/commands/plan_review.md", + }, + ], + skills: [], + hooks: undefined, + mcpServers: undefined, + } + + const bundle = convertClaudeToPi(plugin, { + agentMode: "subagent", + inferTemperature: false, + permissions: "none", + }) + + expect(bundle.prompts.map((p) => p.name)).toEqual(["plan-review", "plan-review-2"]) + + const firstPrompt = parseFrontmatter(bundle.prompts[0].content) + expect(firstPrompt.body).toContain("/plan-review-2") + + const secondPrompt = parseFrontmatter(bundle.prompts[1].content) + expect(secondPrompt.body).toContain("/plan-review") + expect(secondPrompt.body).not.toContain("/plan-review-2") + }) + test("appends 
MCPorter compatibility note when command references MCP", () => { const plugin: ClaudePlugin = { root: "/tmp/plugin", @@ -177,4 +302,80 @@ describe("convertClaudeToPi", () => { expect(parsedPrompt.body).toContain("Pi + MCPorter note") expect(parsedPrompt.body).toContain("mcporter_call") }) + + test("strips outer quotes from Task args to avoid doubled quoting", () => { + const body = `- Task agent("feature description")\n- Task agent('single quoted')\n- Task agent(unquoted args)` + const transformed = transformPiBodyContent(body) + expect(transformed).toContain('task="feature description"') + expect(transformed).toContain('task="single quoted"') + expect(transformed).toContain('task="unquoted args"') + expect(transformed).not.toContain('""') + expect(transformed).not.toContain("''") + }) + + test("does not rewrite 'Run subagent with' in prose context", () => { + const body = "Run subagent with caution when handling large inputs." + const transformed = transformPiBodyContent(body) + expect(transformed).toContain("Run subagent with caution") + expect(transformed).not.toContain("ce_subagent") + }) + + test("rewrites 'Run subagent with agent=' in structured context", () => { + const body = 'Run subagent with agent="repo-research-analyst" and task="research".' + const transformed = transformPiBodyContent(body) + expect(transformed).toContain('Run ce_subagent with agent="repo-research-analyst"') + }) + + test("remaps agent names in existing structured subagent invocations", () => { + const body = 'Run subagent with agent="code_review" and task="feature".' + const transformed = transformPiBodyContent(body, { + agents: { + "code-review": "code-review", + code_review: "code-review-2", + }, + }) + + expect(transformed).toContain('Run ce_subagent with agent="code-review-2" and task="feature".') + expect(transformed).not.toContain('agent="code_review"') + }) + + test("uses sentinel comment for MCPorter note idempotency", () => { + const body = "Use MCP servers for docs lookup." 
+ const transformed = appendCompatibilityNoteIfNeeded(transformPiBodyContent(body)) + expect(transformed).toContain("<!-- compound-pi-mcporter-note -->") + expect(transformed).toContain("## Pi + MCPorter note") + + const doubleTransformed = appendCompatibilityNoteIfNeeded(transformPiBodyContent(transformed)) + expect(doubleTransformed.match(/<!-- compound-pi-mcporter-note -->/g)?.length).toBe(1) + expect(doubleTransformed.match(/## Pi \+ MCPorter note/g)?.length).toBe(1) + }) + + test("does not duplicate the MCPorter compatibility note on repeated transforms", () => { + const body = [ + "Use MCP servers for docs lookup.", + "", + "<!-- compound-pi-mcporter-note -->", + "## Pi + MCPorter note", + "For MCP access in Pi, use MCPorter via the generated tools:", + "- `mcporter_list` to inspect available MCP tools", + "- `mcporter_call` to invoke a tool", + "", + ].join("\n") + + const transformed = appendCompatibilityNoteIfNeeded(transformPiBodyContent(body)) + expect(transformed.match(/## Pi \+ MCPorter note/g)?.length ?? 0).toBe(1) + }) + + test("does not rewrite URL hosts while still rewriting actual slash commands", () => { + const body = "See https://figma.com/file/123 and then run /figma."
+ const transformed = transformPiBodyContent(body, { + prompts: { + figma: "figma-2", + }, + }) + + expect(transformed).toContain("https://figma.com/file/123") + expect(transformed).toContain("/figma-2") + expect(transformed).not.toContain("https://figma-2.com") + }) }) diff --git a/tests/pi-writer.test.ts b/tests/pi-writer.test.ts index ad3e81d7f..a81ec56de 100644 --- a/tests/pi-writer.test.ts +++ b/tests/pi-writer.test.ts @@ -1,9 +1,20 @@ -import { describe, expect, test } from "bun:test" +import { afterEach, describe, expect, spyOn, test } from "bun:test" import { promises as fs } from "fs" import path from "path" import os from "os" import { writePiBundle } from "../src/targets/pi" import type { PiBundle } from "../src/types/pi" +import { resolvePiLayout } from "../src/utils/pi-layout" +import { setAtomicWriteFailureHookForTests, setManagedPathSnapshotHookForTests } from "../src/utils/files" +import { getPiPolicyFingerprint, setPiPolicyFingerprintForTests } from "../src/utils/pi-policy" +import { + createManagedArtifact, + createPiManagedSection, + getPiManagedTrustInfo, + loadPiManagedStateWithTrust, + replacePiManagedSection, + writePiManagedState, +} from "../src/utils/pi-managed" async function exists(filePath: string): Promise { try { @@ -14,7 +25,190 @@ async function exists(filePath: string): Promise { } } +afterEach(() => { + setAtomicWriteFailureHookForTests(null) + setManagedPathSnapshotHookForTests(null) + setPiPolicyFingerprintForTests(null) +}) + describe("writePiBundle", () => { + test("classifies freshly written install manifests as verified for their canonical root", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-verified-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const bundle: PiBundle = { + pluginName: "compound-engineering", + prompts: [{ name: "workflows-plan", content: "Prompt content" 
}], + skillDirs: [], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const layout = resolvePiLayout(outputRoot, "install") + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.install.artifacts).toHaveLength(1) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("persists the current Pi policy fingerprint in install managed state", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-policy-fingerprint-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await writePiBundle(path.join(tempRoot, ".pi"), { + pluginName: "compound-engineering", + prompts: [{ name: "plan-review", content: "Body" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(path.join(tempRoot, ".pi"), "install")) + expect(trust.state?.policyFingerprint).toBe(getPiPolicyFingerprint()) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("treats install managed state as stale when the policy fingerprint changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-policy-fingerprint-stale-")) + const stateHome = path.join(tempRoot, "state-home") + const outputRoot = path.join(tempRoot, ".pi") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + setPiPolicyFingerprintForTests("policy-v1") + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "plan-review", content: "Body" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + setPiPolicyFingerprintForTests("policy-v2") + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(outputRoot, "install")) + expect(trust.status).toBe("stale") + expect(trust.verifiedSections.install).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + 
test("treats only explicit Pi roots as direct install roots", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-layout-roots-")) + const previousHome = process.env.HOME + try { + process.env.HOME = tempRoot + const projectPiRoot = path.join(tempRoot, ".pi") + const globalPiRoot = path.join(tempRoot, ".pi", "agent") + const customAgentRoot = path.join(tempRoot, "agent") + + expect(resolvePiLayout(projectPiRoot, "install").root).toBe(projectPiRoot) + expect(resolvePiLayout(globalPiRoot, "install").root).toBe(globalPiRoot) + expect(resolvePiLayout(customAgentRoot, "install").root).toBe(path.join(customAgentRoot, ".pi")) + } finally { + process.env.HOME = previousHome + } + }) + + test("writes custom install roots named agent under the nested .pi layout", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-agent-root-")) + const outputRoot = path.join(tempRoot, "agent") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "workflows-plan", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, ".pi", "prompts", "workflows-plan.md"))).toBe(true) + expect(await exists(path.join(outputRoot, "prompts", "workflows-plan.md"))).toBe(false) + }) + + test("treats malformed machine-key state as unverified", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-machine-key-invalid-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "workflows-plan", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const machineKeyPath = path.join(stateHome, ".compound-engineering", "pi-managed-key") + await 
fs.writeFile(machineKeyPath, "\n") + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(outputRoot, "install")) + expect(trust.status).toBe("legacy") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("concurrent machine-key initialization keeps both newly written manifests verified", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-machine-key-race-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const layoutA = resolvePiLayout(path.join(tempRoot, "workspace-a", ".pi"), "install") + const layoutB = resolvePiLayout(path.join(tempRoot, "workspace-b", ".pi"), "install") + const stateA = replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [createManagedArtifact(layoutA, "prompt", "workflow-a", "workflow-a")], + }), "compound-engineering") + const stateB = replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [createManagedArtifact(layoutB, "prompt", "workflow-b", "workflow-b")], + }), "compound-engineering") + + await Promise.all([ + writePiManagedState(layoutA, stateA, { install: true, sync: false }), + writePiManagedState(layoutB, stateB, { install: true, sync: false }), + ]) + + const trustA = await loadPiManagedStateWithTrust(layoutA) + const trustB = await loadPiManagedStateWithTrust(layoutB) + expect(trustA.status).toBe("verified") + expect(trustB.status).toBe("verified") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("treats copied managed manifests as stale when the canonical root changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-stale-copy-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const sourceRoot = path.join(tempRoot, "source", ".pi") + const copiedRoot = path.join(tempRoot, "copied", ".pi") + const bundle: PiBundle = { + pluginName: 
"compound-engineering", + prompts: [{ name: "workflows-plan", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(sourceRoot, bundle) + + const sourceLayout = resolvePiLayout(sourceRoot, "install") + const copiedLayout = resolvePiLayout(copiedRoot, "install") + await fs.mkdir(path.dirname(copiedLayout.managedManifestPath), { recursive: true }) + await fs.copyFile(sourceLayout.managedManifestPath, copiedLayout.managedManifestPath) + + const copiedTrust = await loadPiManagedStateWithTrust(copiedLayout) + expect(copiedTrust.status).toBe("legacy") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + test("writes prompts, skills, extensions, mcporter config, and AGENTS.md block", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-")) const outputRoot = path.join(tempRoot, ".pi") @@ -48,6 +242,24 @@ describe("writePiBundle", () => { const agentsContent = await fs.readFile(agentsPath, "utf8") expect(agentsContent).toContain("BEGIN COMPOUND PI TOOL MAP") expect(agentsContent).toContain("MCPorter") + expect(agentsContent).toContain("compound-engineering/mcporter.json (project sync)") + expect(agentsContent).toContain(".pi/compound-engineering/mcporter.json (project install)") + }) + + test("rejects unsafe bundle names before mutating Pi roots", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-unsafe-name-")) + const outputRoot = path.join(tempRoot, ".pi") + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "../escape", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("Unsafe prompt name") + + await expect(fs.access(path.join(outputRoot, "prompts", "..", "escape.md"))).rejects.toBeDefined() + await expect(fs.access(path.join(outputRoot, "prompts"))).rejects.toBeDefined() }) test("transforms Task calls in copied SKILL.md files", async 
() => { @@ -72,7 +284,7 @@ Run these research agents: const bundle: PiBundle = { prompts: [], - skillDirs: [{ name: "ce:plan", sourceDir: sourceSkillDir }], + skillDirs: [{ name: "ce-plan", sourceDir: sourceSkillDir }], generatedSkills: [], extensions: [], } @@ -84,56 +296,1898 @@ Run these research agents: "utf8", ) - expect(installedSkill).toContain('Run subagent with agent="repo-research-analyst" and task="feature_description".') - expect(installedSkill).toContain('Run subagent with agent="learnings-researcher" and task="feature_description".') - expect(installedSkill).toContain('Run subagent with agent="code-simplicity-reviewer".') + expect(installedSkill).toContain("name: ce-plan") + expect(installedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + expect(installedSkill).toContain('Run ce_subagent with agent="learnings-researcher" and task="feature_description".') + expect(installedSkill).toContain('Run ce_subagent with agent="code-simplicity-reviewer".') expect(installedSkill).not.toContain("Task compound-engineering:") }) - test("writes to ~/.pi/agent style roots without nesting under .pi", async () => { + test("writes to explicit ~/.pi/agent roots without nesting under .pi", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-agent-root-")) - const outputRoot = path.join(tempRoot, "agent") + const previousHome = process.env.HOME + try { + process.env.HOME = tempRoot + const outputRoot = path.join(tempRoot, ".pi", "agent") - const bundle: PiBundle = { + const bundle: PiBundle = { + prompts: [{ name: "workflows-work", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + expect(await exists(path.join(outputRoot, "prompts", "workflows-work.md"))).toBe(true) + expect(await exists(path.join(outputRoot, ".pi"))).toBe(false) + } finally { + process.env.HOME = previousHome + } + }) + + test("writes 
custom install roots under nested .pi layout", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-")) + const outputRoot = path.join(tempRoot, "custom-root") + + await writePiBundle(outputRoot, { prompts: [{ name: "workflows-work", content: "Prompt content" }], skillDirs: [], generatedSkills: [], extensions: [], - } + }) - await writePiBundle(outputRoot, bundle) + expect(await exists(path.join(outputRoot, ".pi", "prompts", "workflows-work.md"))).toBe(true) + expect(await exists(path.join(outputRoot, "prompts", "workflows-work.md"))).toBe(false) + }) + + test("canonicalizes trailing separators on explicit direct Pi roots", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-direct-root-trailing-slash-")) + const outputRoot = path.join(tempRoot, ".pi") + path.sep - expect(await exists(path.join(outputRoot, "prompts", "workflows-work.md"))).toBe(true) - expect(await exists(path.join(outputRoot, ".pi"))).toBe(false) + expect(resolvePiLayout(outputRoot, "install").root).toBe(path.join(tempRoot, ".pi")) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "workflows-plan", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(tempRoot, ".pi", "prompts", "workflows-plan.md"))).toBe(true) + expect(await exists(path.join(tempRoot, ".pi", ".pi"))).toBe(false) }) - test("backs up existing mcporter config before overwriting", async () => { - const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-backup-")) - const outputRoot = path.join(tempRoot, ".pi") - const configPath = path.join(outputRoot, "compound-engineering", "mcporter.json") + test("cleans legacy flat custom-root prompts after writing the canonical nested layout", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-legacy-prompt-")) + const stateHome = path.join(tempRoot, "state-home") + 
process.env.COMPOUND_ENGINEERING_HOME = stateHome + const outputRoot = path.join(tempRoot, "custom-root") + const legacyPromptPath = path.join(outputRoot, "prompts", "workflows-work.md") + const directLayout = resolvePiLayout(outputRoot, "sync") - await fs.mkdir(path.dirname(configPath), { recursive: true }) - await fs.writeFile(configPath, JSON.stringify({ previous: true }, null, 2)) + await fs.mkdir(path.dirname(legacyPromptPath), { recursive: true }) + await fs.writeFile(legacyPromptPath, "legacy prompt\n") + let directState = replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [createManagedArtifact(directLayout, "prompt", "workflows-work", "workflows-work")], + }), "compound-engineering") + directState = replacePiManagedSection(directState, "sync", createPiManagedSection({ + mcpServers: ["sync-owned"], + })) + await writePiManagedState(directLayout, directState, { install: true, sync: true }) - const bundle: PiBundle = { + await writePiBundle(outputRoot, { + prompts: [{ name: "workflows-work", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, ".pi", "prompts", "workflows-work.md"))).toBe(true) + expect(await exists(legacyPromptPath)).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves direct-root sync prompts by avoiding legacy install cleanup for sync-owned paths", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-sync-prompt-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const outputRoot = path.join(tempRoot, "custom-root") + const legacyPromptPath = path.join(outputRoot, "prompts", "workflows-work.md") + const directLayout = resolvePiLayout(outputRoot, "sync") + + await fs.mkdir(path.dirname(legacyPromptPath), { recursive: true }) + await fs.writeFile(legacyPromptPath, "sync-owned prompt\n") + + let 
directState = replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [createManagedArtifact(directLayout, "prompt", "workflows-work", "workflows-work")], + }), "compound-engineering") + directState = replacePiManagedSection(directState, "sync", createPiManagedSection({ + artifacts: [createManagedArtifact(directLayout, "prompt", "workflows-work", "workflows-work")], + })) + await writePiManagedState(directLayout, directState, { install: true, sync: true }) + + await writePiBundle(outputRoot, { + prompts: [{ name: "workflows-work", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, ".pi", "prompts", "workflows-work.md"))).toBe(true) + expect(await exists(legacyPromptPath)).toBe(true) + + const directTrust = await loadPiManagedStateWithTrust(directLayout) + expect(directTrust.state?.sync.artifacts).toEqual([ + expect.objectContaining({ + emittedName: "workflows-work", + kind: "prompt", + }), + ]) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves legacy direct-root prompts when install ownership cannot be proven", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-unverified-install-prompt-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const outputRoot = path.join(tempRoot, "custom-root") + const legacyPromptPath = path.join(outputRoot, "prompts", "workflows-work.md") + const directLayout = resolvePiLayout(outputRoot, "sync") + + await fs.mkdir(path.dirname(legacyPromptPath), { recursive: true }) + await fs.writeFile(legacyPromptPath, "legacy prompt\n") + + const directState = replacePiManagedSection(null, "sync", createPiManagedSection({ + mcpServers: ["sync-owned"], + }), "compound-engineering") + await writePiManagedState(directLayout, directState, { install: false, sync: true }) + + await writePiBundle(outputRoot, { + prompts: [{ 
name: "workflows-work", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, ".pi", "prompts", "workflows-work.md"))).toBe(true) + expect(await exists(legacyPromptPath)).toBe(true) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes verified legacy direct-root compat extension for custom-root installs when sync has no claim", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-legacy-compat-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const outputRoot = path.join(tempRoot, "custom-root") + const directLayout = resolvePiLayout(outputRoot, "sync") + const legacyCompatPath = path.join(outputRoot, "extensions", "compound-engineering-compat.ts") + + await fs.mkdir(path.dirname(legacyCompatPath), { recursive: true }) + await fs.writeFile(legacyCompatPath, "legacy compat\n") + let directState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { compatExtension: true }, + }), "compound-engineering") + directState = replacePiManagedSection(directState, "sync", createPiManagedSection({ + mcpServers: ["sync-owned"], + })) + await writePiManagedState(directLayout, directState, { install: true, sync: true }) + + await writePiBundle(outputRoot, { + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [{ name: "compound-engineering-compat.ts", content: "export const compat = true" }], + }) + + expect(await exists(path.join(outputRoot, ".pi", "extensions", "compound-engineering-compat.ts"))).toBe(true) + expect(await exists(legacyCompatPath)).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("prunes only verified legacy install-owned direct-root mcporter keys for custom-root installs", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-legacy-mcporter-")) + const 
stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const outputRoot = path.join(tempRoot, "custom-root") + const directLayout = resolvePiLayout(outputRoot, "sync") + + let directState = replacePiManagedSection(null, "install", createPiManagedSection({ + mcpServers: ["install-owned"], + sharedResources: { mcporterConfig: true }, + }), "compound-engineering") + directState = replacePiManagedSection(directState, "sync", createPiManagedSection({ + mcpServers: ["sync-owned"], + sharedResources: { mcporterConfig: true }, + })) + await writePiManagedState(directLayout, directState, { install: true, sync: true }) + await fs.mkdir(path.dirname(directLayout.mcporterConfigPath), { recursive: true }) + await fs.writeFile( + directLayout.mcporterConfigPath, + JSON.stringify({ + mcpServers: { + "install-owned": { command: "install-cmd" }, + "sync-owned": { command: "sync-cmd" }, + unrelated: { command: "user-cmd" }, + }, + }, null, 2) + "\n", + ) + + await writePiBundle(outputRoot, { prompts: [], skillDirs: [], generatedSkills: [], extensions: [], mcporterConfig: { mcpServers: { - linear: { baseUrl: "https://mcp.linear.app/mcp" }, + nested: { command: "nested-cmd" }, }, }, + }) + + expect(await exists(path.join(outputRoot, ".pi", "compound-engineering", "mcporter.json"))).toBe(true) + const currentConfig = JSON.parse(await fs.readFile(directLayout.mcporterConfigPath, "utf8")) as { + mcpServers: Record + } + expect(currentConfig.mcpServers["install-owned"]).toBeUndefined() + expect(currentConfig.mcpServers["sync-owned"]).toBeDefined() + expect(currentConfig.mcpServers.unrelated).toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("warns and preserves legacy direct-root compat state when sync trust is unavailable", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-custom-root-legacy-compat-ambiguous-")) + const stateHome = path.join(tempRoot, "state-home") + 
process.env.COMPOUND_ENGINEERING_HOME = stateHome + const outputRoot = path.join(tempRoot, "custom-root") + const directLayout = resolvePiLayout(outputRoot, "sync") + const legacyCompatPath = path.join(outputRoot, "extensions", "compound-engineering-compat.ts") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + let directState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { compatExtension: true }, + }), "compound-engineering") + directState = replacePiManagedSection(directState, "sync", createPiManagedSection({ + sharedResources: { compatExtension: true }, + })) + await writePiManagedState(directLayout, directState, { install: true, sync: true }) + + const manifest = JSON.parse(await fs.readFile(directLayout.managedManifestPath, "utf8")) as { + sync?: { sharedResources?: { compatExtension?: boolean } } + } + manifest.sync = { + ...(manifest.sync ?? {}), + sharedResources: { compatExtension: false }, + } + await fs.writeFile(directLayout.managedManifestPath, JSON.stringify(manifest, null, 2) + "\n") + + await fs.mkdir(path.dirname(legacyCompatPath), { recursive: true }) + await fs.writeFile(legacyCompatPath, "legacy compat\n") + + await writePiBundle(outputRoot, { + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [{ name: "compound-engineering-compat.ts", content: "export const compat = true" }], + }) + + expect(await fs.readFile(legacyCompatPath, "utf8")).toBe("legacy compat\n") + expect(await exists(path.join(outputRoot, ".pi", "extensions", "compound-engineering-compat.ts"))).toBe(true) + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("compound-engineering-compat.ts")) + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("rewrites copied skill frontmatter names to match Pi-safe directory names", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-frontmatter-")) + const outputRoot = path.join(tempRoot, 
".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: generate_command", + "description: Generate a command", + "---", + "", + "# Generate command", + "", + "1. Task compound-engineering:workflow:pr-comment-resolver(comment1)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [ + { + name: "generate-command", + sourceDir: sourceSkillDir, + }, + ], + generatedSkills: [], + extensions: [], } await writePiBundle(outputRoot, bundle) - const files = await fs.readdir(path.dirname(configPath)) - const backupFileName = files.find((file) => file.startsWith("mcporter.json.bak.")) - expect(backupFileName).toBeDefined() + const copiedSkill = await fs.readFile(path.join(outputRoot, "skills", "generate-command", "SKILL.md"), "utf8") + expect(copiedSkill).toContain("name: generate-command") + expect(copiedSkill).not.toContain("name: generate_command") + expect(copiedSkill).toContain("Run ce_subagent with agent=\"pr-comment-resolver\" and task=\"comment1\".") + }) - const currentConfig = JSON.parse(await fs.readFile(configPath, "utf8")) as { mcpServers: Record } - expect(currentConfig.mcpServers.linear).toBeDefined() + test("uses provided name maps when rewriting copied skills under collisions", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-namemaps-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Uses colliding refs", + "---", + "", + "Task code_review(feature)", + "Run /prompts:plan_review after this.", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir 
}], + generatedSkills: [], + extensions: [], + nameMaps: { + agents: { code_review: "code-review-2" }, + prompts: { plan_review: "plan-review-2" }, + }, + } + + await writePiBundle(outputRoot, bundle) + + const copiedSkill = await fs.readFile(path.join(outputRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(copiedSkill).toContain('Run ce_subagent with agent="code-review-2" and task="feature".') + expect(copiedSkill).toContain("/plan-review-2") + }) + + test("rewrites frontmatterless copied skills during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-frontmatterless-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "# Frontmatterless skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "frontmatterless-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const installedSkill = await fs.readFile(path.join(outputRoot, "skills", "frontmatterless-skill", "SKILL.md"), "utf8") + expect(installedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + expect(installedSkill).not.toContain("name:") + }) + + test("does not rematerialize an already-converged frontmatterless skill during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-frontmatterless-stable-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "# Frontmatterless skill", + "", + "- 
Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "frontmatterless-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + const skillsDir = path.join(outputRoot, "skills") + const before = await fs.readdir(skillsDir) + + await writePiBundle(outputRoot, bundle) + const after = await fs.readdir(skillsDir) + + expect(before.filter((entry) => entry.startsWith("frontmatterless-skill.bak."))).toHaveLength(0) + expect(after.filter((entry) => entry.startsWith("frontmatterless-skill.bak."))).toHaveLength(0) + }) + + test("regenerates valid frontmatter for malformed copied skills during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-malformed-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: [broken", + "description: broken frontmatter", + "---", + "", + "# Broken skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "broken-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const installedSkill = await fs.readFile(path.join(outputRoot, "skills", "broken-skill", "SKILL.md"), "utf8") + expect(installedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + expect(installedSkill).toContain("name: broken-skill") + expect(installedSkill).not.toContain("name: [broken") + }) + + test("does not rematerialize an already-converged malformed skill during Pi bundle writes", async () => 
{ + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-malformed-stable-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: [broken", + "description: broken frontmatter", + "---", + "", + "# Broken skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "broken-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + const skillsDir = path.join(outputRoot, "skills") + const before = await fs.readdir(skillsDir) + + await writePiBundle(outputRoot, bundle) + const after = await fs.readdir(skillsDir) + + expect(before.filter((entry) => entry.startsWith("broken-skill.bak."))).toHaveLength(0) + expect(after.filter((entry) => entry.startsWith("broken-skill.bak."))).toHaveLength(0) + }) + + test("does not append MCPorter compatibility note to copied skills", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-no-mcporter-note-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Uses MCP and Task", + "---", + "", + "Use MCP servers for docs lookup.", + "Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const copiedSkill = await fs.readFile(path.join(outputRoot, 
"skills", "docs-skill", "SKILL.md"), "utf8") + expect(copiedSkill).toContain("Use MCP servers for docs lookup.") + expect(copiedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + expect(copiedSkill).not.toContain("Pi + MCPorter note") + expect(copiedSkill).not.toContain("") + }) + + test("skips dangling symlinked file assets during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-dangling-symlink-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + const missingAssetPath = path.join(tempRoot, "missing.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.symlink(missingAssetPath, path.join(sourceSkillDir, "asset.txt")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: Planning workflow", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "ce-plan", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const installedSkill = await fs.readFile(path.join(outputRoot, "skills", "ce-plan", "SKILL.md"), "utf8") + expect(installedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "asset.txt"))).toBe(false) + + const skillsDir = path.join(outputRoot, "skills") + const before = await fs.readdir(skillsDir) + await writePiBundle(outputRoot, bundle) + const after = await fs.readdir(skillsDir) + + expect(before.filter((entry) => entry.startsWith("ce-plan.bak."))).toHaveLength(0) + expect(after.filter((entry) => entry.startsWith("ce-plan.bak."))).toHaveLength(0) + }) + + test("preserves nested frontmatter objects when 
rewriting copied Pi skills", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-nested-frontmatter-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: nested_skill", + "description: Nested metadata", + "metadata:", + " owner: dragos", + " flags:", + " sync: true", + "---", + "", + "# Nested skill", + "", + "No Pi rewrite needed.", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [ + { + name: "nested-skill", + sourceDir: sourceSkillDir, + }, + ], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const copiedSkill = await fs.readFile(path.join(outputRoot, "skills", "nested-skill", "SKILL.md"), "utf8") + expect(copiedSkill).toContain("name: nested-skill") + expect(copiedSkill).toContain("metadata:\n owner: dragos\n flags:\n sync: true") + expect(copiedSkill).not.toContain("[object Object]") + }) + + test("copies symlinked file assets when Pi skill materialization is required", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-symlink-asset-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + const sharedAssetPath = path.join(sourceSkillDir, "shared.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(sharedAssetPath, "shared asset\n") + await fs.symlink(sharedAssetPath, path.join(sourceSkillDir, "asset.txt")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: Planning workflow", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "ce-plan", sourceDir: 
sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const copiedAsset = await fs.readFile(path.join(outputRoot, "skills", "ce-plan", "asset.txt"), "utf8") + expect(copiedAsset).toBe("shared asset\n") + }) + + test("skips symlinked file assets that escape the skill root during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-escaped-symlink-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + const externalAssetDir = path.join(tempRoot, "shared") + const externalAssetPath = path.join(externalAssetDir, "shared.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(externalAssetDir, { recursive: true }) + await fs.writeFile(externalAssetPath, "shared asset\n") + await fs.symlink(externalAssetPath, path.join(sourceSkillDir, "asset.txt")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: Planning workflow", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "ce-plan", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "asset.txt"))).toBe(false) + }) + + test("rejects swapped passthrough file targets during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-file-swap-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + const protectedFile = path.join(tempRoot, "protected.txt") + const targetFile = path.join(outputRoot, "skills", "ce-plan", "asset.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await 
fs.writeFile(protectedFile, "protected\n") + await fs.writeFile(path.join(sourceSkillDir, "asset.txt"), "updated asset\n") + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: ce:plan\n---\n") + await fs.mkdir(path.dirname(targetFile), { recursive: true }) + await fs.writeFile(targetFile, "original asset\n") + + setAtomicWriteFailureHookForTests(async (filePath, stage) => { + if (filePath === targetFile && stage === "beforeRename") { + await fs.unlink(targetFile) + await fs.symlink(protectedFile, targetFile) + } + }) + + await expect(writePiBundle(outputRoot, { + prompts: [], + skillDirs: [{ name: "ce-plan", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + })).rejects.toThrow(/Refusing to write through symlink target|Refusing to restore through symlink target|ENOENT/) + + expect(await fs.readFile(protectedFile, "utf8")).toBe("protected\n") + expect((await fs.lstat(targetFile)).isSymbolicLink()).toBe(true) + }) + + test("removes stale generated-agent directories after normalization changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-stale-generated-agent-")) + const outputRoot = path.join(tempRoot, ".pi") + + const firstBundle: PiBundle = { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [{ name: "ce-plan", sourceName: "ce:plan", content: "---\nname: ce-plan\n---\n\nBody" }], + extensions: [], + nameMaps: { agents: { "ce:plan": "ce-plan" } }, + } + + await writePiBundle(outputRoot, firstBundle) + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "SKILL.md"))).toBe(true) + + const secondBundle: PiBundle = { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [{ name: "ce-plan-2", sourceName: "ce:plan", content: "---\nname: ce-plan-2\n---\n\nBody" }], + extensions: [], + nameMaps: { agents: { "ce:plan": "ce-plan-2" } }, + } + + await writePiBundle(outputRoot, secondBundle) + + expect(await 
exists(path.join(outputRoot, "skills", "ce-plan-2", "SKILL.md"))).toBe(true) + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "SKILL.md"))).toBe(false) + }) + + test("removes deleted managed prompts and generated-agent directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-delete-managed-artifacts-")) + const outputRoot = path.join(tempRoot, ".pi") + const managedManifestPath = path.join(outputRoot, "compound-engineering", "compound-engineering-managed.json") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "plan-review", sourceName: "workflows:plan-review", content: "Prompt body" }], + skillDirs: [], + generatedSkills: [{ name: "ce-plan", sourceName: "ce:plan", content: "---\nname: ce-plan\n---\n\nBody" }], + extensions: [], + nameMaps: { + prompts: { "workflows:plan-review": "plan-review" }, + agents: { "ce:plan": "ce-plan" }, + }, + }) + + expect(await exists(path.join(outputRoot, "prompts", "plan-review.md"))).toBe(true) + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "SKILL.md"))).toBe(true) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, "prompts", "plan-review.md"))).toBe(false) + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "SKILL.md"))).toBe(false) + expect(await exists(managedManifestPath)).toBe(false) + }) + + test("removes stale nested files when a skill changes from copied to generated at the same emitted path", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-kind-transition-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), 
"---\nname: ce-plan\n---\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "extra.txt"), "extra\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "ce-plan", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "nested", "extra.txt"))).toBe(true) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [{ name: "ce-plan", content: "---\nname: ce-plan\n---\n\nGenerated\n" }], + extensions: [], + }) + + expect(await exists(path.join(outputRoot, "skills", "ce-plan", "nested", "extra.txt"))).toBe(false) + expect(await fs.readFile(path.join(outputRoot, "skills", "ce-plan", "SKILL.md"), "utf8")).toContain("Generated") + }) + + test("does not pre-delete a same-path skill directory claimed only by an unverified manifest", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-unverified-kind-transition-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const targetSkillDir = path.join(layout.skillsDir, "ce-plan") + + await fs.mkdir(path.join(targetSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(targetSkillDir, "nested", "keep.txt"), "keep\n") + + const seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [createManagedArtifact(layout, "copied-skill", "ce-plan", "ce-plan")], + }), "compound-engineering") + await writePiManagedState(layout, seededState, { install: true, sync: false }) + + const manifest = JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8")) as { + install?: { sharedResources?: { compatExtension?: boolean } } + } + manifest.install = { + ...(manifest.install 
?? {}), + sharedResources: { compatExtension: true }, + } + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2) + "\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [{ name: "ce-plan", content: "---\nname: ce-plan\n---\n\nGenerated\n" }], + extensions: [], + }) + + expect(await exists(path.join(targetSkillDir, "nested", "keep.txt"))).toBe(true) + expect(await fs.readFile(path.join(targetSkillDir, "SKILL.md"), "utf8")).toContain("Generated") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("restores prior managed state when stale skill cleanup fails after publication work", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-stale-cleanup-rollback-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const skillsParent = layout.skillsDir + const externalSkillsParent = path.join(tempRoot, "external-skills") + const externalOldSkillDir = path.join(externalSkillsParent, "old-skill") + + await fs.mkdir(path.dirname(skillsParent), { recursive: true }) + await fs.mkdir(externalOldSkillDir, { recursive: true }) + await fs.writeFile(path.join(externalOldSkillDir, "SKILL.md"), "old\n") + await fs.symlink(externalSkillsParent, skillsParent) + + const seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [createManagedArtifact(layout, "copied-skill", "old-skill", "old-skill")], + }), "compound-engineering") + await writePiManagedState(layout, seededState, { install: true, sync: false }) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "new-note", content: "new body" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("symlinked ancestor") 
+ + const restored = await loadPiManagedStateWithTrust(layout) + expect(restored.status).toBe("verified") + expect(restored.state?.install.artifacts.map((artifact) => artifact.emittedName)).toContain("old-skill") + expect(restored.state?.install.artifacts.map((artifact) => artifact.emittedName)).not.toContain("new-note") + expect(await exists(path.join(layout.promptsDir, "new-note.md"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("rejects cyclic directory symlinks during Pi skill materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-skill-cycle-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.symlink(sourceSkillDir, path.join(sourceSkillDir, "loop")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: Planning workflow", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [{ name: "ce-plan", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + } + + await expect(writePiBundle(outputRoot, bundle)).rejects.toThrow("cyclic directory symlink") + }) + + test("backs up existing mcporter config before overwriting", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-backup-")) + const outputRoot = path.join(tempRoot, ".pi") + const configPath = path.join(outputRoot, "compound-engineering", "mcporter.json") + + await fs.mkdir(path.dirname(configPath), { recursive: true }) + await fs.writeFile(configPath, JSON.stringify({ previous: true }, null, 2)) + + const bundle: PiBundle = { + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + mcporterConfig: { + mcpServers: { + linear: { baseUrl: "https://mcp.linear.app/mcp" }, + }, + 
}, + } + + await writePiBundle(outputRoot, bundle) + + const files = await fs.readdir(path.dirname(configPath)) + const backupFileName = files.find((file) => file.startsWith("mcporter.json.bak.")) + expect(backupFileName).toBeDefined() + + const currentConfig = JSON.parse(await fs.readFile(configPath, "utf8")) as { mcpServers: Record } + expect(currentConfig.mcpServers.linear).toBeDefined() + }) + + test("records install-owned MCP server keys in managed state", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-install-mcp-ownership-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + mcporterConfig: { + mcpServers: { + installA: { command: "cmd-a" }, + installB: { command: "cmd-b" }, + }, + }, + }) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(outputRoot, "install")) + expect(trust.state?.install.mcpServers).toEqual(["installA", "installB"]) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves malformed unverified install mcporter config when install wants to write MCP servers", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-malformed-unverified-install-mcp-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + await fs.mkdir(path.dirname(layout.mcporterConfigPath), { recursive: true }) + await fs.writeFile(layout.mcporterConfigPath, "{ not json\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: 
[], + extensions: [], + mcporterConfig: { + mcpServers: { + context7: { command: "context7" }, + }, + }, + }) + + expect(await fs.readFile(layout.mcporterConfigPath, "utf8")).toContain("{ not json") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("leaving it untouched because install ownership cannot be proven")) + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("missing") + expect(trust.state).toBeNull() + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("classifies old verification hashes as stale when shared resource flags become trust-relevant", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-stale-shared-resources-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const legacyState = replacePiManagedSection(null, "install", createPiManagedSection({ + nameMaps: { + skills: { "compound-engineering:ce-plan": "ce-plan" }, + }, + sharedResources: { + compatExtension: false, + }, + }), "compound-engineering") + + await writePiManagedState(layout, legacyState, { install: true, sync: false }) + + const manifest = JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8")) as { + install?: { sharedResources?: { compatExtension?: boolean } } + } + manifest.install = { + ...(manifest.install ?? 
{}), + sharedResources: { compatExtension: true }, + } + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2) + "\n") + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("stale") + expect(trust.verifiedSections.install).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes stale install-owned mcporter servers while preserving sync-owned and user-owned keys", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-shared-mcporter-cleanup-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + let seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + mcpServers: ["install-owned"], + sharedResources: { mcporterConfig: true }, + }), "compound-engineering") + seededState = replacePiManagedSection(seededState, "sync", createPiManagedSection({ + mcpServers: ["sync-owned"], + sharedResources: { mcporterConfig: true }, + })) + + await writePiManagedState(layout, seededState, { install: true, sync: true }) + await fs.mkdir(path.dirname(layout.mcporterConfigPath), { recursive: true }) + await fs.writeFile( + layout.mcporterConfigPath, + JSON.stringify({ + mcpServers: { + "install-owned": { command: "install-cmd" }, + "sync-owned": { command: "sync-cmd" }, + unrelated: { command: "user-cmd" }, + }, + }, null, 2) + "\n", + ) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const currentConfig = JSON.parse(await fs.readFile(layout.mcporterConfigPath, "utf8")) as { + mcpServers: Record + } + expect(currentConfig.mcpServers["install-owned"]).toBeUndefined() + expect(currentConfig.mcpServers["sync-owned"]).toBeDefined() + 
expect(currentConfig.mcpServers.unrelated).toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("reports missing trust info when no managed manifest exists", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-missing-trust-")) + const layout = resolvePiLayout(path.join(tempRoot, ".pi"), "install") + + const trust = await getPiManagedTrustInfo(layout) + expect(trust.status).toBe("missing") + expect(trust.isVerified).toBe(false) + }) + + test("does not rewrite unchanged managed manifest on no-op install reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-noop-manifest-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const bundle: PiBundle = { + pluginName: "compound-engineering", + prompts: [{ name: "workflows-plan", content: "Prompt content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + } + + await writePiBundle(outputRoot, bundle) + + const layout = resolvePiLayout(outputRoot, "install") + const firstManifest = await fs.stat(layout.managedManifestPath) + const firstVerification = await fs.readFile(layout.verificationPath, "utf8") + + await new Promise((resolve) => setTimeout(resolve, 15)) + await writePiBundle(outputRoot, bundle) + + const secondManifest = await fs.stat(layout.managedManifestPath) + const secondVerification = await fs.readFile(layout.verificationPath, "utf8") + expect(secondManifest.mtimeMs).toBe(firstManifest.mtimeMs) + expect(secondVerification).toBe(firstVerification) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not snapshot unchanged shared install files on no-op reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-noop-shared-snapshots-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const 
outputRoot = path.join(tempRoot, ".pi") + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const layout = resolvePiLayout(outputRoot, "install") + const snapshottedPaths: string[] = [] + setManagedPathSnapshotHookForTests((targetPath) => { + snapshottedPaths.push(targetPath) + }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(snapshottedPaths).not.toContain(layout.agentsPath) + expect(snapshottedPaths).not.toContain(layout.managedManifestPath) + expect(snapshottedPaths).not.toContain(layout.verificationPath) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not create rollback temp dirs on no-op install reruns for unchanged copied skills", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-noop-skill-rerun-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const layout = resolvePiLayout(outputRoot, "install") + await new Promise((resolve) => setTimeout(resolve, 15)) + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + 
expect((await fs.readdir(layout.root)).some((entry) => entry.startsWith(".pi-publish-rollback-"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not snapshot unchanged copied skill directories on no-op install reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-noop-skill-snapshot-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetSkillDir = path.join(resolvePiLayout(outputRoot, "install").skillsDir, "docs-skill") + const snapshottedPaths: string[] = [] + setManagedPathSnapshotHookForTests((targetPath) => { + snapshottedPaths.push(targetPath) + }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + expect(snapshottedPaths).not.toContain(targetSkillDir) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("rejects symlinked AGENTS.md targets during Pi bundle writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-agents-symlink-")) + const outputRoot = path.join(tempRoot, ".pi") + const externalTarget = path.join(tempRoot, "external-agents.md") + const agentsPath = path.join(outputRoot, "AGENTS.md") + + await fs.mkdir(outputRoot, { recursive: true }) + await 
fs.writeFile(externalTarget, "external agents\n") + await fs.symlink(externalTarget, agentsPath) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("Refusing to snapshot symlink target") + + expect(await fs.readFile(externalTarget, "utf8")).toBe("external agents\n") + }) + + test("restores prior AGENTS.md content when managed block publication fails", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-agents-rollback-")) + const outputRoot = path.join(tempRoot, ".pi") + const agentsPath = path.join(outputRoot, "AGENTS.md") + + await fs.mkdir(outputRoot, { recursive: true }) + await fs.writeFile(agentsPath, "# Existing agents\n") + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === agentsPath && stage === "beforeRename") { + throw new Error("simulated AGENTS failure") + } + }) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("simulated AGENTS failure") + + expect(await fs.readFile(agentsPath, "utf8")).toBe("# Existing agents\n") + }) + + test("keeps the prior verified install state when AGENTS publication fails", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-agents-trust-boundary-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "old-plan", content: "Old prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + await fs.writeFile(layout.agentsPath, "# Existing agents\n") + + setAtomicWriteFailureHookForTests((filePath, stage) => { + if 
(filePath === layout.agentsPath && stage === "beforeRename") { + throw new Error("simulated AGENTS failure") + } + }) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "new-plan", content: "New prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("simulated AGENTS failure") + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.install.artifacts.map((artifact) => artifact.emittedName)).toEqual(["old-plan"]) + expect(await exists(path.join(layout.promptsDir, "old-plan.md"))).toBe(true) + expect(await exists(path.join(layout.promptsDir, "new-plan.md"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes verification metadata and rollback temp dirs when an install bundle becomes empty", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-empty-install-cleanup-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "old-plan", content: "Old prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(layout.managedManifestPath)).toBe(true) + expect(await exists(layout.verificationPath)).toBe(true) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(layout.managedManifestPath)).toBe(false) + expect(await exists(layout.verificationPath)).toBe(false) + expect((await fs.readdir(layout.root)).some((entry) => entry.startsWith(".pi-publish-rollback-"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes newly 
written prompts when managed state commit fails", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-commit-rollback-prompt-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "old-plan", content: "Old prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === layout.managedManifestPath && stage === "beforeRename") { + throw new Error("simulated manifest failure") + } + }) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "new-plan", content: "New prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("simulated manifest failure") + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.install.artifacts.map((artifact) => artifact.emittedName)).toEqual(["old-plan"]) + expect(await exists(path.join(layout.promptsDir, "old-plan.md"))).toBe(true) + expect(await exists(path.join(layout.promptsDir, "new-plan.md"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes the canonical compat extension when install no longer owns it", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-install-compat-removal-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const compatPath = path.join(outputRoot, "extensions", "compound-engineering-compat.ts") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: 
[], + extensions: [{ name: "compound-engineering-compat.ts", content: "export const compat = true" }], + }) + + expect(await exists(compatPath)).toBe(true) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(compatPath)).toBe(false) + const agents = await fs.readFile(path.join(outputRoot, "AGENTS.md"), "utf8") + expect(agents).not.toContain("ce_subagent") + expect(agents).toContain("compat tools are not currently installed") + expect((await loadPiManagedStateWithTrust(resolvePiLayout(outputRoot, "install"))).state?.install.sharedResources.compatExtension ?? false).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves the canonical compat extension when verified sync state still owns it", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-install-compat-shared-root-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const compatPath = path.join(layout.extensionsDir, "compound-engineering-compat.ts") + + let seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { compatExtension: true }, + }), "compound-engineering") + seededState = replacePiManagedSection(seededState, "sync", createPiManagedSection({ + sharedResources: { compatExtension: true }, + })) + + await fs.mkdir(layout.extensionsDir, { recursive: true }) + await fs.writeFile(compatPath, "sync compat\n") + await writePiManagedState(layout, seededState, { install: true, sync: true }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await fs.readFile(compatPath, "utf8")).toBe("sync compat\n") + + delete 
process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes the live compat extension when sync ownership is not verified", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-unverified-sync-compat-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const compatPath = path.join(layout.extensionsDir, "compound-engineering-compat.ts") + + let seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { compatExtension: true }, + }), "compound-engineering") + seededState = replacePiManagedSection(seededState, "sync", createPiManagedSection({ + sharedResources: { compatExtension: true }, + })) + + await fs.mkdir(layout.extensionsDir, { recursive: true }) + await fs.writeFile(compatPath, "unverified sync compat\n") + await writePiManagedState(layout, seededState, { install: true, sync: false }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + await expect(fs.access(compatPath)).rejects.toBeDefined() + const agents = await fs.readFile(path.join(outputRoot, "AGENTS.md"), "utf8") + expect(agents).not.toContain("ce_subagent") + expect(agents).toContain("compat tools are not currently installed") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes the live compat extension and disables advertising when sync ownership at the canonical root is not verified", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-unverified-sync-compat-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const compatPath = 
path.join(layout.extensionsDir, "compound-engineering-compat.ts") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [{ name: "compound-engineering-compat.ts", content: "sync compat\n" }], + }) + + const manifest = JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8")) as { + version: number + pluginName?: string + sync?: { sharedResources?: { compatExtension?: boolean } } + } + manifest.sync = { sharedResources: { compatExtension: true } } + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2)) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + await expect(fs.access(compatPath)).rejects.toBeDefined() + const agents = await fs.readFile(path.join(outputRoot, "AGENTS.md"), "utf8") + expect(agents).not.toContain("ce_subagent") + expect(agents).toContain("compat tools are not currently installed") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("documents that ce_subagent cwd must remain inside the active workspace", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-agents-cwd-doc-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await writePiBundle(path.join(tempRoot, ".pi"), { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [{ name: "compound-engineering-compat.ts", content: "export const compat = true" }], + }) + + const agents = await fs.readFile(path.join(tempRoot, ".pi", "AGENTS.md"), "utf8") + expect(agents).toContain("ce_subagent cwd must stay within the active workspace root") + expect(agents).toContain("ce_run_prompt") + expect(agents).toContain("MCPorter configPath overrides are ignored") + expect(agents).toContain("foreign qualified Task refs remain 
rejected unless the compat runtime explicitly verifies a dispatchable namespace") + expect(agents).toContain("ce_list_capabilities") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("install and sync shared-resource transitions stay aligned on AGENTS and compat outcomes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-sync-parity-matrix-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + const compatPath = path.join(layout.extensionsDir, "compound-engineering-compat.ts") + + let seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { compatExtension: false }, + }), "compound-engineering") + seededState = replacePiManagedSection(seededState, "sync", createPiManagedSection({ + sharedResources: { compatExtension: true }, + })) + + await fs.mkdir(layout.extensionsDir, { recursive: true }) + await fs.writeFile(compatPath, "sync compat\n") + await writePiManagedState(layout, seededState, { install: false, sync: true }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await fs.readFile(compatPath, "utf8")).toBe("sync compat\n") + let agents = await fs.readFile(path.join(outputRoot, "AGENTS.md"), "utf8") + expect(agents).toContain("ce_subagent") + + seededState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { compatExtension: false }, + }), "compound-engineering") + seededState = replacePiManagedSection(seededState, "sync", createPiManagedSection({ + sharedResources: { compatExtension: true }, + })) + await fs.writeFile(compatPath, "stale sync compat\n") + await writePiManagedState(layout, seededState, { install: false, sync: false }) + + await 
writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + await expect(fs.access(compatPath)).rejects.toBeDefined() + agents = await fs.readFile(path.join(outputRoot, "AGENTS.md"), "utf8") + expect(agents).not.toContain("ce_subagent") + expect(agents).toContain("compat tools are not currently installed") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves shared mcporter keys when sync ownership at the canonical root is not verified", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-unverified-sync-mcp-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + mcporterConfig: { + mcpServers: { + context7: { baseUrl: "https://mcp.context7.com/mcp" }, + }, + }, + }) + + const manifest = JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8")) as { + version: number + pluginName?: string + sync?: { mcpServers?: string[]; sharedResources?: { mcporterConfig?: boolean } } + } + manifest.sync = { + mcpServers: ["context7"], + sharedResources: { mcporterConfig: true }, + } + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2)) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const currentConfig = JSON.parse(await fs.readFile(layout.mcporterConfigPath, "utf8")) as { + mcpServers?: Record + } + expect(currentConfig.mcpServers?.context7).toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("changing one file in a materialized skill updates content without 
rewriting unrelated files", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-partial-skill-update-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Docs skill", + "---", + "", + "Original body", + ].join("\n"), + ) + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetSkillDir = path.join(outputRoot, "skills", "docs-skill") + const stablePath = path.join(targetSkillDir, "nested", "stable.txt") + const stableBefore = await fs.readFile(stablePath, "utf8") + const stableStatBefore = await fs.stat(stablePath) + await new Promise((resolve) => setTimeout(resolve, 15)) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Docs skill", + "---", + "", + "Updated body", + ].join("\n"), + ) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const stableAfter = await fs.readFile(stablePath, "utf8") + const stableStatAfter = await fs.stat(stablePath) + expect(await fs.readFile(path.join(targetSkillDir, "SKILL.md"), "utf8")).toContain("Updated body") + expect(stableAfter).toBe(stableBefore) + expect(stableStatAfter.mtimeMs).toBe(stableStatBefore.mtimeMs) + + const skillEntries = await fs.readdir(path.join(outputRoot, "skills")) + expect(skillEntries.some((entry) => entry.startsWith("docs-skill.bak."))).toBe(false) + }) + + test("root-level 
file add and remove stay on the incremental skill update path", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-root-delta-incremental-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetSkillDir = path.join(outputRoot, "skills", "docs-skill") + const stablePath = path.join(targetSkillDir, "nested", "stable.txt") + const stableStatBefore = await fs.stat(stablePath) + + await new Promise((resolve) => setTimeout(resolve, 15)) + await fs.writeFile(path.join(sourceSkillDir, "README.md"), "root add\n") + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + expect(await fs.readFile(path.join(targetSkillDir, "README.md"), "utf8")).toBe("root add\n") + expect((await fs.readdir(path.join(outputRoot, "skills"))).some((entry) => entry.startsWith("docs-skill.bak."))).toBe(false) + + await new Promise((resolve) => setTimeout(resolve, 15)) + await fs.unlink(path.join(sourceSkillDir, "README.md")) + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + await expect(fs.access(path.join(targetSkillDir, "README.md"))).rejects.toBeDefined() + expect((await fs.readdir(path.join(outputRoot, "skills"))).some((entry) => 
entry.startsWith("docs-skill.bak."))).toBe(false) + expect((await fs.stat(stablePath)).mtimeMs).toBe(stableStatBefore.mtimeMs) + }) + + test("removes stale nested entries from a materialized skill without creating a backup", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-stale-skill-entry-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(path.join(sourceSkillDir, "nested", "remove-me"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\ndescription: Docs skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "keep.txt"), "keep\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "remove-me", "gone.txt"), "gone\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + await fs.rm(path.join(sourceSkillDir, "nested", "remove-me"), { recursive: true, force: true }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetSkillDir = path.join(outputRoot, "skills", "docs-skill") + expect(await exists(path.join(targetSkillDir, "nested", "keep.txt"))).toBe(true) + expect(await exists(path.join(targetSkillDir, "nested", "remove-me"))).toBe(false) + + const skillEntries = await fs.readdir(path.join(outputRoot, "skills")) + expect(skillEntries.some((entry) => entry.startsWith("docs-skill.bak."))).toBe(false) + }) + + test("falls back to whole-directory replacement for nested file-to-directory transitions", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-shape-fallback-")) + const outputRoot = path.join(tempRoot, ".pi") + const 
sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\ndescription: Docs skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested"), "file first\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + await fs.rm(path.join(sourceSkillDir, "nested"), { force: true }) + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "nested", "child.txt"), "child\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetSkillDir = path.join(outputRoot, "skills", "docs-skill") + expect(await exists(path.join(targetSkillDir, "nested", "child.txt"))).toBe(true) + + const skillEntries = await fs.readdir(path.join(outputRoot, "skills")) + expect(skillEntries.some((entry) => entry.startsWith("docs-skill.bak."))).toBe(true) + }) + + test("does not delete prompts claimed only by an unverified manifest", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-forged-cleanup-")) + const outputRoot = path.join(tempRoot, ".pi") + const layout = resolvePiLayout(outputRoot, "install") + + await fs.mkdir(path.dirname(layout.managedManifestPath), { recursive: true }) + await fs.mkdir(layout.promptsDir, { recursive: true }) + await fs.writeFile(path.join(layout.promptsDir, "user-owned.md"), "user prompt\n") + await fs.writeFile( + layout.managedManifestPath, + JSON.stringify({ + version: 1, + install: { + artifacts: [ + { + kind: "prompt", + sourceName: "forged", + emittedName: "user-owned", + relativePath: path.join("prompts", 
"user-owned.md"), + }, + ], + }, + }, null, 2), + ) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + expect(await exists(path.join(layout.promptsDir, "user-owned.md"))).toBe(true) }) }) diff --git a/tests/plugin-path.test.ts b/tests/plugin-path.test.ts index 4ede45126..bf3c5520d 100644 --- a/tests/plugin-path.test.ts +++ b/tests/plugin-path.test.ts @@ -292,4 +292,70 @@ describe("plugin-path", () => { expect(exitCode).not.toBe(0) expect(stderr).toContain("Plugin directory not found") }) + + test("rejects unsafe plugin names before deriving checkout paths", async () => { + const repoRoot = await createTestRepo() + const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "plugin-path-unsafe-plugin-")) + + const proc = Bun.spawn([ + "bun", + "run", + path.join(projectRoot, "src", "index.ts"), + "plugin-path", + "../escape", + "--branch", + "main", + ], { + cwd: projectRoot, + stdout: "pipe", + stderr: "pipe", + env: { + ...gitEnv, + HOME: tempHome, + COMPOUND_PLUGIN_GITHUB_SOURCE: repoRoot, + }, + }) + + const exitCode = await proc.exited + const stderr = await new Response(proc.stderr).text() + expect(exitCode).not.toBe(0) + expect(stderr).toContain("Unsafe plugin name") + }) + + test("rejects symlinked cache checkouts before running git", async () => { + const repoRoot = await createTestRepo() + const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), "plugin-path-symlink-cache-")) + const cacheRoot = path.join(tempHome, ".cache", "compound-engineering", "branches") + const externalRoot = path.join(tempHome, "external-checkout") + const symlinkedCheckout = path.join(cacheRoot, "compound-engineering-main") + + await fs.mkdir(cacheRoot, { recursive: true }) + await fs.mkdir(externalRoot, { recursive: true }) + await fs.symlink(externalRoot, symlinkedCheckout) + + const proc = Bun.spawn([ + "bun", + "run", + path.join(projectRoot, "src", "index.ts"), + 
"plugin-path", + "compound-engineering", + "--branch", + "main", + ], { + cwd: projectRoot, + stdout: "pipe", + stderr: "pipe", + env: { + ...gitEnv, + HOME: tempHome, + COMPOUND_PLUGIN_GITHUB_SOURCE: repoRoot, + }, + }) + + const exitCode = await proc.exited + const stderr = await new Response(proc.stderr).text() + + expect(exitCode).not.toBe(0) + expect(stderr).toContain("symlink") + }) }) diff --git a/tests/sync-pi.test.ts b/tests/sync-pi.test.ts index 6459e65a8..8be8e1975 100644 --- a/tests/sync-pi.test.ts +++ b/tests/sync-pi.test.ts @@ -1,14 +1,231 @@ -import { describe, expect, test } from "bun:test" +import { afterEach, describe, expect, spyOn, test } from "bun:test" +import { createHash } from "crypto" import { promises as fs } from "fs" import path from "path" import os from "os" -import { syncToPi } from "../src/sync/pi" +import { pathToFileURL } from "url" +import { classifyUnsupportedPiSyncStatus } from "../src/sync/pi-artifact-status" +import { setPiSyncPassHookForTests, setPiSyncRerunModeForTests, syncToPi } from "../src/sync/pi" +import { setPiSyncCommandConversionHookForTests } from "../src/sync/commands" +import { loadClaudeHome } from "../src/parsers/claude-home" import type { ClaudeHomeConfig } from "../src/parsers/claude-home" +import { PI_COMPAT_EXTENSION_SOURCE } from "../src/templates/pi/compat-extension" +import { writePiBundle } from "../src/targets/pi" +import { setAtomicWriteFailureHookForTests, setManagedPathSnapshotHookForTests } from "../src/utils/files" +import { resolvePiLayout } from "../src/utils/pi-layout" +import { createManagedArtifact, createPiManagedSection, createPiManagedSectionHashPayload, loadPiManagedStateWithTrust, replacePiManagedSection, writePiManagedState } from "../src/utils/pi-managed" +import { isSafePiManagedName } from "../src/utils/pi-managed" +import { getPiPolicyFingerprint, setPiPolicyFingerprintForTests } from "../src/utils/pi-policy" +import { normalizePiSkillName, setPiSkillFullCompareHookForTests, 
setPiSkillSourceAnalysisHookForTests, setPiSkillSourceFingerprintHookForTests, uniquePiSkillName } from "../src/utils/pi-skills" +import { derivePiSharedResourceContract } from "../src/utils/pi-trust-contract" + +async function seedVerifiedInstallNameMaps( + outputRoot: string, + nameMaps: { + agents?: Record + skills?: Record + prompts?: Record + }, +): Promise { + const layout = resolvePiLayout(outputRoot, "sync") + const state = replacePiManagedSection(null, "install", createPiManagedSection({ nameMaps }), "compound-engineering") + await writePiManagedState(layout, state, { install: true, sync: false }) +} + +async function rewriteManifestWithMatchingVerification( + layout: ReturnType, + sectionName: "install" | "sync", + manifestMutator: (manifest: any) => any, + effectiveSection: ReturnType, +) { + const manifest = manifestMutator(JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8"))) + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2)) + + const machineKey = (await fs.readFile(path.join(process.env.COMPOUND_ENGINEERING_HOME!, ".compound-engineering", "pi-managed-key"), "utf8")).trim() + const hash = createHash("sha256").update(JSON.stringify(createPiManagedSectionHashPayload(layout.root, effectiveSection))).digest("hex") + const verification = JSON.parse(await fs.readFile(layout.verificationPath, "utf8")) as any + verification[sectionName] = { hash: `${machineKey}:${hash}` } + await fs.writeFile(layout.verificationPath, JSON.stringify(verification, null, 2)) +} + +async function seedVerifiedProjectInstallNameMaps( + outputRoot: string, + nameMaps: { + agents?: Record + skills?: Record + prompts?: Record + }, +): Promise { + const layout = resolvePiLayout(outputRoot, "install") + const state = replacePiManagedSection(null, "install", createPiManagedSection({ nameMaps }), "compound-engineering") + await writePiManagedState(layout, state, { install: true, sync: false }) +} + +async function seedVerifiedSyncNameMaps( + 
outputRoot: string, + nameMaps: { + agents?: Record + skills?: Record + prompts?: Record + }, + options?: { + sharedResources?: { + compatExtension?: boolean + mcporterConfig?: boolean + } + }, +): Promise { + const layout = resolvePiLayout(outputRoot, "sync") + const state = replacePiManagedSection(null, "sync", createPiManagedSection({ + nameMaps, + sharedResources: options?.sharedResources, + }), "compound-engineering") + await writePiManagedState(layout, state, { install: false, sync: true }) +} + +async function seedVerifiedGlobalSyncNameMaps( + homeRoot: string, + nameMaps: { + agents?: Record + skills?: Record + prompts?: Record + }, +): Promise { + const globalRoot = path.join(homeRoot, ".pi", "agent") + const layout = resolvePiLayout(globalRoot, "sync") + const state = replacePiManagedSection(null, "sync", createPiManagedSection({ nameMaps }), "compound-engineering") + await writePiManagedState(layout, state, { install: false, sync: true }) +} + +async function loadCompatHelpers(moduleRoot: string): Promise<{ + resolveAgentName: (cwd: string, value: string) => string + resolvePromptName: (cwd: string, value: string) => string + resolveMcporterConfigPath: (cwd: string, explicit?: string) => string | undefined + resolveTaskCwd: (cwd: string, taskCwd?: string) => string + setAliasManifestSignatureHookForTests: (hook: ((filePath: string) => void) | null) => void + default: (pi: { + registerTool: (tool: { name: string; execute: (...args: any[]) => any }) => void + exec: (...args: any[]) => any + }) => void +}> { + const compatPath = path.join(moduleRoot, "extensions", "compound-engineering-compat.ts") + await fs.mkdir(path.dirname(compatPath), { recursive: true }) + + const source = PI_COMPAT_EXTENSION_SOURCE.replace( + 'import { Type } from "@sinclair/typebox"\n', + 'const Type = { Object: (value: unknown) => value, String: (value: unknown) => value, Optional: (value: unknown) => value, Array: (value: unknown) => value, Number: (value: unknown) => value, 
Boolean: (value: unknown) => value, Record: (_key: unknown, value: unknown) => value, Any: () => ({}) }\n', + ) + "\nexport { resolveAgentName, resolvePromptName, resolveMcporterConfigPath, resolveTaskCwd }\n" + + await fs.writeFile(compatPath, source) + return import(pathToFileURL(compatPath).href + `?t=${Date.now()}`) +} + +async function readTreeSnapshot(root: string): Promise> { + const snapshot: Record = {} + + async function walk(dir: string, relativeDir = ""): Promise { + const entries = await fs.readdir(dir, { withFileTypes: true }).catch(() => []) + for (const entry of entries.sort((left, right) => left.name.localeCompare(right.name))) { + const fullPath = path.join(dir, entry.name) + const relativePath = relativeDir ? path.join(relativeDir, entry.name) : entry.name + if (entry.isDirectory()) { + await walk(fullPath, relativePath) + continue + } + if (entry.isFile()) { + snapshot[relativePath] = await fs.readFile(fullPath, "utf8") + } + } + } + + await walk(root) + return snapshot +} + +function normalizeRootPaths(value: T, root: string): T { + return JSON.parse(JSON.stringify(value).replaceAll(root, "")) as T +} + +afterEach(() => { + setAtomicWriteFailureHookForTests(null) + setManagedPathSnapshotHookForTests(null) + setPiSyncPassHookForTests(null) + setPiSyncRerunModeForTests(null) + setPiSyncCommandConversionHookForTests(null) + setPiSkillFullCompareHookForTests(null) + setPiSkillSourceAnalysisHookForTests(null) + setPiSkillSourceFingerprintHookForTests(null) + setPiPolicyFingerprintForTests(null) + delete process.env.COMPOUND_ENGINEERING_PI_POLICY_FINGERPRINT +}) describe("syncToPi", () => { - test("symlinks skills and writes MCPorter config", async () => { + test("deduped Pi names stay within the managed-name validity limit", () => { + const base = normalizePiSkillName("a".repeat(80)) + const used = new Set([base]) + let latest = "" + + for (let index = 2; index <= 1000; index += 1) { + latest = uniquePiSkillName(base, used) + } + + 
expect(latest.length).toBeLessThanOrEqual(64) + expect(isSafePiManagedName(latest)).toBe(true) + }) + + test("shared-resource contract distinguishes active, preserved-untrusted, and absent states", () => { + expect(derivePiSharedResourceContract({ + nextOwns: true, + })).toEqual({ + state: "active", + retain: true, + advertise: true, + }) + + expect(derivePiSharedResourceContract({ + preserveUntrusted: true, + })).toEqual({ + state: "preserved-untrusted", + retain: false, + advertise: false, + }) + }) + + test("uses one shared unsupported-status classifier for prompt and skill sync outcomes", () => { + expect(classifyUnsupportedPiSyncStatus("Unsupported unresolved first-party qualified ref for Pi sync: compound-engineering:missing")).toBe("retryable") + expect(classifyUnsupportedPiSyncStatus("Unsupported foreign qualified Task ref for Pi sync: unknown-plugin:review:bad")).toBe("blocked-by-policy") + expect(classifyUnsupportedPiSyncStatus("Unsupported malformed prompt ref")).toBe("unsupported-final") + }) + + test("classifies freshly written sync manifests as verified for their canonical root", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-verified-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.artifacts).toHaveLength(1) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("materializes synced skills and writes MCPorter config", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-")) const fixtureSkillDir = path.join(import.meta.dir, 
"fixtures", "sample-plugin", "skills", "skill-one") + const layout = resolvePiLayout(tempRoot, "sync") const config: ClaudeHomeConfig = { skills: [ @@ -26,11 +243,11 @@ describe("syncToPi", () => { await syncToPi(config, tempRoot) - const linkedSkillPath = path.join(tempRoot, "skills", "skill-one") + const linkedSkillPath = path.join(layout.skillsDir, "skill-one") const linkedStat = await fs.lstat(linkedSkillPath) - expect(linkedStat.isSymbolicLink()).toBe(true) + expect(linkedStat.isDirectory()).toBe(true) - const mcporterPath = path.join(tempRoot, "compound-engineering", "mcporter.json") + const mcporterPath = layout.mcporterConfigPath const mcporterConfig = JSON.parse(await fs.readFile(mcporterPath, "utf8")) as { mcpServers: Record } @@ -39,6 +256,463 @@ describe("syncToPi", () => { expect(mcporterConfig.mcpServers.local?.command).toBe("echo") }) + test("writes custom sync roots at the direct sync layout", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-custom-root-")) + const outputRoot = path.join(tempRoot, "custom-root") + const layout = resolvePiLayout(outputRoot, "sync") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, outputRoot) + + expect(await fs.readFile(path.join(layout.promptsDir, "plan-review.md"), "utf8")).toContain("Review body") + await expect(fs.access(path.join(outputRoot, "prompts", "plan-review.md"))).resolves.toBeNull() + }) + + test("accepts top-level personal skills discovered through trusted symlink entries", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-external-root-")) + const actualSkillsRoot = path.join(tempRoot, "actual-skills") + const linkedSkillsRoot = path.join(tempRoot, "linked-skills") + const externalSkillDir = path.join(tempRoot, "external-skill") + const warnSpy = 
spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(actualSkillsRoot, { recursive: true }) + await fs.mkdir(externalSkillDir, { recursive: true }) + await fs.writeFile( + path.join(externalSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: External skill", + "---", + "", + "- /skill:docs-skill", + ].join("\n"), + ) + await fs.symlink(actualSkillsRoot, linkedSkillsRoot) + await fs.symlink(externalSkillDir, path.join(linkedSkillsRoot, "docs-skill")) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + entryDir: path.join(linkedSkillsRoot, "docs-skill"), + trustedRoot: linkedSkillsRoot, + trustedBoundary: externalSkillDir, + sourceDir: externalSkillDir, + skillPath: path.join(linkedSkillsRoot, "docs-skill", "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(resolvePiLayout(tempRoot, "sync").skillsDir, "docs-skill", "SKILL.md"), "utf8")).toContain("name: docs-skill") + expect(warnSpy).not.toHaveBeenCalled() + + warnSpy.mockRestore() + }) + + test("accepts top-level personal skills that resolve within the lexical trusted root", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-in-root-home-symlink-")) + const actualSkillsRoot = path.join(tempRoot, "actual-skills") + const linkedSkillsRoot = path.join(tempRoot, "linked-skills") + const canonicalSkillDir = path.join(actualSkillsRoot, "reviewer-real") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(canonicalSkillDir, { recursive: true }) + await fs.writeFile( + path.join(canonicalSkillDir, "SKILL.md"), + [ + "---", + "name: reviewer", + "description: In-root symlinked personal skill", + "---", + "", + "Body", + ].join("\n"), + ) + await fs.symlink(actualSkillsRoot, linkedSkillsRoot) + await fs.symlink(canonicalSkillDir, path.join(actualSkillsRoot, "reviewer")) + + await syncToPi({ + skills: [ + { + name: "reviewer", + entryDir: path.join(linkedSkillsRoot, 
"reviewer"), + trustedRoot: linkedSkillsRoot, + trustedBoundary: canonicalSkillDir, + sourceDir: canonicalSkillDir, + skillPath: path.join(linkedSkillsRoot, "reviewer", "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(resolvePiLayout(tempRoot, "sync").skillsDir, "reviewer", "SKILL.md"), "utf8")).toContain("name: reviewer") + expect(warnSpy).not.toHaveBeenCalled() + + warnSpy.mockRestore() + }) + + test("records symlinked top-level personal skills using a canonical trusted boundary", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-canonical-trusted-boundary-")) + const actualSkillsRoot = path.join(tempRoot, "actual-skills") + const linkedSkillsRoot = path.join(tempRoot, "skills") + const externalSkillDir = path.join(tempRoot, "external-skill") + + await fs.mkdir(actualSkillsRoot, { recursive: true }) + await fs.mkdir(externalSkillDir, { recursive: true }) + await fs.writeFile(path.join(externalSkillDir, "SKILL.md"), "---\nname: reviewer\n---\nReview things.\n") + await fs.writeFile(path.join(externalSkillDir, "shared.txt"), "hello\n") + await fs.symlink(actualSkillsRoot, linkedSkillsRoot) + await fs.symlink(externalSkillDir, path.join(linkedSkillsRoot, "reviewer")) + + const config = await loadClaudeHome(tempRoot) + + expect(config.skills).toHaveLength(1) + expect(config.skills[0]?.entryDir).toBe(path.join(linkedSkillsRoot, "reviewer")) + expect(config.skills[0]?.trustedRoot).toBe(linkedSkillsRoot) + expect(config.skills[0]?.trustedBoundary).toBe(externalSkillDir) + expect(config.skills[0]?.sourceDir).toBe(externalSkillDir) + }) + + test("materializes invalid skill names into Pi-safe directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-invalid-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: 
ce:plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce:plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const materializedSkillPath = path.join(tempRoot, "skills", "ce-plan") + const skillStat = await fs.lstat(materializedSkillPath) + expect(skillStat.isSymbolicLink()).toBe(false) + + const copiedSkill = await fs.readFile(path.join(materializedSkillPath, "SKILL.md"), "utf8") + expect(copiedSkill).toContain("name: ce-plan") + expect(copiedSkill).not.toContain("name: ce:plan") + expect(copiedSkill).toContain("Run ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + }) + + test("materializes valid Pi-named skills when body needs Pi-specific rewrites", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-transform-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill-valid") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const syncedSkillPath = path.join(tempRoot, "skills", "ce-plan") + const skillStat = await fs.lstat(syncedSkillPath) + expect(skillStat.isSymbolicLink()).toBe(false) + + const copiedSkill = await fs.readFile(path.join(syncedSkillPath, "SKILL.md"), "utf8") + expect(copiedSkill).toContain("Run 
ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + }) + + test("keeps a previously materialized Pi skill directory materialized after rewrites are no longer needed", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-dir-to-symlink-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill-transition") + const syncedSkillPath = path.join(tempRoot, "skills", "ce-plan") + const skillPath = path.join(sourceSkillDir, "SKILL.md") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + skillPath, + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath, + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + expect((await fs.lstat(syncedSkillPath)).isSymbolicLink()).toBe(false) + + await fs.writeFile( + skillPath, + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "No Pi rewrite needed.", + "Updated from source.", + ].join("\n"), + ) + + await syncToPi(config, tempRoot) + + const syncedStat = await fs.lstat(syncedSkillPath) + expect(syncedStat.isDirectory()).toBe(true) + + const liveSkill = await fs.readFile(path.join(syncedSkillPath, "SKILL.md"), "utf8") + expect(liveSkill).toContain("Updated from source.") + expect(liveSkill).not.toContain("Run ce_subagent") + + const files = await fs.readdir(path.join(tempRoot, "skills")) + const backupDirName = files.find((file) => file.startsWith("ce-plan.bak.")) + expect(backupDirName).toBeUndefined() + }) + + test("removes stale nested entries from a materialized synced skill without creating a backup", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-stale-entry-")) + const 
sourceSkillDir = path.join(tempRoot, "claude-skill-stale") + const syncedSkillPath = path.join(tempRoot, "skills", "ce-plan") + const skillPath = path.join(sourceSkillDir, "SKILL.md") + + await fs.mkdir(path.join(sourceSkillDir, "nested", "remove-me"), { recursive: true }) + await fs.writeFile(skillPath, "---\nname: ce-plan\ndescription: Plan workflow\n---\n\n# Plan\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "keep.txt"), "keep\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "remove-me", "gone.txt"), "gone\n") + + const config: ClaudeHomeConfig = { + skills: [{ name: "ce-plan", sourceDir: sourceSkillDir, skillPath }], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + await fs.rm(path.join(sourceSkillDir, "nested", "remove-me"), { recursive: true, force: true }) + await syncToPi(config, tempRoot) + + expect(await fs.readFile(path.join(syncedSkillPath, "nested", "keep.txt"), "utf8")).toBe("keep\n") + await expect(fs.access(path.join(syncedSkillPath, "nested", "remove-me"))).rejects.toBeDefined() + + const files = await fs.readdir(path.join(tempRoot, "skills")) + const backupDirName = files.find((file) => file.startsWith("ce-plan.bak.")) + expect(backupDirName).toBeUndefined() + }) + + test("falls back to whole-directory replacement for nested file-to-directory transitions during sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-shape-fallback-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill-shape") + const syncedSkillPath = path.join(tempRoot, "skills", "ce-plan") + const skillPath = path.join(sourceSkillDir, "SKILL.md") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(skillPath, "---\nname: ce-plan\ndescription: Plan workflow\n---\n\n# Plan\n") + await fs.writeFile(path.join(sourceSkillDir, "nested"), "file first\n") + + const config: ClaudeHomeConfig = { + skills: [{ name: "ce-plan", sourceDir: sourceSkillDir, skillPath }], + mcpServers: {}, + } 
+ + await syncToPi(config, tempRoot) + + await fs.rm(path.join(sourceSkillDir, "nested"), { force: true }) + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "nested", "child.txt"), "child\n") + + await syncToPi(config, tempRoot) + + expect(await fs.readFile(path.join(syncedSkillPath, "nested", "child.txt"), "utf8")).toBe("child\n") + + const files = await fs.readdir(path.join(tempRoot, "skills")) + const backupDirName = files.find((file) => file.startsWith("ce-plan.bak.")) + expect(backupDirName).toBeDefined() + }) + + test("replaces an existing symlink when Pi-specific materialization is required", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-symlink-migration-")) + const existingTargetDir = path.join(tempRoot, "existing-skill") + const sourceSkillDir = path.join(tempRoot, "claude-skill-migrated") + const syncedSkillPath = path.join(tempRoot, "skills", "ce-plan") + + await fs.mkdir(existingTargetDir, { recursive: true }) + await fs.writeFile(path.join(existingTargetDir, "SKILL.md"), "---\nname: ce-plan\ndescription: Existing\n---\n\n# Existing\n") + await fs.mkdir(path.dirname(syncedSkillPath), { recursive: true }) + await fs.symlink(existingTargetDir, syncedSkillPath) + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const skillStat = await fs.lstat(syncedSkillPath) + expect(skillStat.isSymbolicLink()).toBe(false) + + const copiedSkill = await 
fs.readFile(path.join(syncedSkillPath, "SKILL.md"), "utf8") + expect(copiedSkill).toContain("Run ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + }) + + test("rejects Pi skill replacement when the skill parent directory becomes a symlinked ancestor", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-replacement-ancestor-symlink-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill-migrated") + const skillsParent = path.join(tempRoot, "skills") + const syncedSkillPath = path.join(skillsParent, "ce-plan") + const externalRoot = path.join(tempRoot, "external") + + await fs.mkdir(path.join(syncedSkillPath, "nested"), { recursive: true }) + await fs.writeFile(path.join(syncedSkillPath, "SKILL.md"), "---\nname: ce-plan\n---\n\nExisting\n") + await fs.writeFile(path.join(syncedSkillPath, "nested", "keep.txt"), "keep\n") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: ce-plan\n---\n\nReplacement\n") + await fs.writeFile(path.join(sourceSkillDir, "nested"), "file now\n") + await fs.mkdir(externalRoot, { recursive: true }) + + await fs.rename(skillsParent, `${skillsParent}-bak`) + await fs.symlink(externalRoot, skillsParent) + + await expect(syncToPi({ + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot)).rejects.toThrow("symlinked ancestor") + + await expect(fs.access(path.join(externalRoot, "ce-plan"))).rejects.toBeDefined() + }) + + test("updates an existing real directory in place when Pi-specific materialization can converge safely", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-backup-dir-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill-updated") + const syncedSkillPath = path.join(tempRoot, "skills", "ce-plan") + + await fs.mkdir(syncedSkillPath, { recursive: 
true }) + await fs.writeFile( + path.join(syncedSkillPath, "SKILL.md"), + "---\nname: ce-plan\ndescription: Existing\n---\n\n# Existing\n\nLocal edits\n", + ) + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const copiedSkill = await fs.readFile(path.join(syncedSkillPath, "SKILL.md"), "utf8") + expect(copiedSkill).toContain("Run ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + + const files = await fs.readdir(path.join(tempRoot, "skills")) + const backupDirName = files.find((file) => file.startsWith("ce-plan.bak.")) + expect(backupDirName).toBeUndefined() + }) + test("merges existing MCPorter config", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-merge-")) const mcporterPath = path.join(tempRoot, "compound-engineering", "mcporter.json") @@ -65,4 +739,5021 @@ describe("syncToPi", () => { expect(merged.mcpServers.existing?.baseUrl).toBe("https://example.com/mcp") expect(merged.mcpServers.context7?.baseUrl).toBe("https://mcp.context7.com/mcp") }) + + test("writes compat extension for MCP-only sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-mcp-only-compat-")) + + const config: ClaudeHomeConfig = { + skills: [], + mcpServers: { + context7: { url: "https://mcp.context7.com/mcp" }, + }, + } + + await syncToPi(config, tempRoot) + + const compatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + const compatContent = await fs.readFile(compatPath, 
"utf8") + expect(compatContent).toContain('name: "mcporter_list"') + expect(compatContent).toContain('name: "mcporter_call"') + expect(compatContent).not.toContain('configPath: Type.Optional') + }) + + test("regenerates valid frontmatter when a skill has malformed frontmatter", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-malformed-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: [broken", + "description: broken frontmatter", + "---", + "", + "# Broken skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "broken-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const syncedSkillPath = path.join(tempRoot, "skills", "broken-skill") + const skillStat = await fs.lstat(syncedSkillPath) + expect(skillStat.isDirectory()).toBe(true) + + const copiedSkill = await fs.readFile(path.join(syncedSkillPath, "SKILL.md"), "utf8") + expect(copiedSkill).toContain("Run ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + expect(copiedSkill).toContain("name: broken-skill") + expect(copiedSkill).not.toContain("name: [broken") + }) + + test("does not create another backup when malformed skill output is already converged", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-malformed-stable-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: [broken", + "description: broken frontmatter", + "---", + "", + "# Broken skill", + "", + "- Task 
compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "broken-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + const skillsDir = path.join(tempRoot, "skills") + const before = await fs.readdir(skillsDir) + + await syncToPi(config, tempRoot) + const after = await fs.readdir(skillsDir) + + expect(before.filter((entry) => entry.startsWith("broken-skill.bak."))).toHaveLength(0) + expect(after.filter((entry) => entry.startsWith("broken-skill.bak."))).toHaveLength(0) + }) + + test("repairs a malformed previously materialized skill target on rerun", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-malformed-target-repair-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const targetSkillDir = path.join(tempRoot, "skills", "broken-skill") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(targetSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: [broken", + "description: broken frontmatter", + "---", + "", + "# Broken skill", + ].join("\n"), + ) + await fs.writeFile(path.join(targetSkillDir, "SKILL.md"), "---\nname: [broken\n---\n\n# stale\n") + + await syncToPi({ + skills: [ + { + name: "broken-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const repaired = await fs.readFile(path.join(targetSkillDir, "SKILL.md"), "utf8") + expect(repaired).toContain("name: broken-skill") + expect(repaired).not.toContain("name: [broken") + }) + + test("rewrites frontmatterless skills during Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-frontmatterless-")) + const sourceSkillDir = path.join(tempRoot, 
"claude-skill-frontmatterless") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "# Personal skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "frontmatterless-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const copiedSkill = await fs.readFile(path.join(tempRoot, "skills", "frontmatterless-skill", "SKILL.md"), "utf8") + expect(copiedSkill).toContain("Run ce_subagent with agent=\"repo-research-analyst\" and task=\"feature_description\".") + expect(copiedSkill).not.toContain("name:") + }) + + test("does not create another backup when frontmatterless skill output is already converged", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-frontmatterless-stable-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill-frontmatterless") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "# Personal skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "frontmatterless-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + const skillsDir = path.join(tempRoot, "skills") + const before = await fs.readdir(skillsDir) + + await syncToPi(config, tempRoot) + const after = await fs.readdir(skillsDir) + + expect(before.filter((entry) => entry.startsWith("frontmatterless-skill.bak."))).toHaveLength(0) + expect(after.filter((entry) => entry.startsWith("frontmatterless-skill.bak."))).toHaveLength(0) + }) + + test("keeps a previously 
invalid materialized skill materialized when it becomes Pi-compatible without forcing a backup", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-invalid-recovery-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const syncedSkillPath = path.join(tempRoot, "skills", "frontmatterless-skill") + const skillPath = path.join(sourceSkillDir, "SKILL.md") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + skillPath, + [ + "# Personal skill", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "frontmatterless-skill", + sourceDir: sourceSkillDir, + skillPath, + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + expect((await fs.lstat(syncedSkillPath)).isSymbolicLink()).toBe(false) + + await fs.writeFile( + skillPath, + [ + "---", + "name: frontmatterless-skill", + "description: Recovered skill", + "---", + "", + "No Pi rewrite needed.", + ].join("\n"), + ) + + await syncToPi(config, tempRoot) + + const syncedStat = await fs.lstat(syncedSkillPath) + expect(syncedStat.isDirectory()).toBe(true) + + const liveSkill = await fs.readFile(path.join(syncedSkillPath, "SKILL.md"), "utf8") + expect(liveSkill).toContain("Recovered skill") + expect(liveSkill).not.toContain("Run ce_subagent") + + const files = await fs.readdir(path.join(tempRoot, "skills")) + const backupDirName = files.find((file) => file.startsWith("frontmatterless-skill.bak.")) + expect(backupDirName).toBeUndefined() + }) + + test("resolves /skill: refs to deduped targets when personal skill names collide", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-collision-")) + const skillDirHyphen = path.join(tempRoot, "generate-command") + const skillDirUnderscore = path.join(tempRoot, "generate_command") + + await fs.mkdir(skillDirHyphen, { recursive: true }) + await 
fs.writeFile( + path.join(skillDirHyphen, "SKILL.md"), + [ + "---", + "name: generate-command", + "description: Hyphen skill", + "---", + "", + "# Hyphen skill", + "", + "Then run /skill:generate_command for the other one.", + ].join("\n"), + ) + + await fs.mkdir(skillDirUnderscore, { recursive: true }) + await fs.writeFile( + path.join(skillDirUnderscore, "SKILL.md"), + [ + "---", + "name: generate_command", + "description: Underscore skill", + "---", + "", + "# Underscore skill", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "generate_command", + sourceDir: skillDirUnderscore, + skillPath: path.join(skillDirUnderscore, "SKILL.md"), + }, + { + name: "generate-command", + sourceDir: skillDirHyphen, + skillPath: path.join(skillDirHyphen, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + // After codepoint sorting: generate-command (0x2D) < generate_command (0x5F) + // generate-command gets base name, generate_command gets -2 + const baseSkill = await fs.readFile(path.join(tempRoot, "skills", "generate-command", "SKILL.md"), "utf8") + expect(baseSkill).toContain("/skill:generate-command-2") + + const suffixedSkill = await fs.readFile(path.join(tempRoot, "skills", "generate-command-2", "SKILL.md"), "utf8") + expect(suffixedSkill).toContain("name: generate-command-2") + }) + + test("resolves Task refs to deduped skill targets when personal skill names collide", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-task-skill-collision-")) + const skillDirHyphen = path.join(tempRoot, "generate-command") + const skillDirUnderscore = path.join(tempRoot, "generate_command") + + await fs.mkdir(skillDirHyphen, { recursive: true }) + await fs.writeFile( + path.join(skillDirHyphen, "SKILL.md"), + [ + "---", + "name: generate-command", + "description: Hyphen skill", + "---", + "", + "# Hyphen skill", + "", + "Task generate_command(create command)", + ].join("\n"), + ) + + await 
fs.mkdir(skillDirUnderscore, { recursive: true }) + await fs.writeFile( + path.join(skillDirUnderscore, "SKILL.md"), + [ + "---", + "name: generate_command", + "description: Underscore skill", + "---", + "", + "# Underscore skill", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "generate_command", + sourceDir: skillDirUnderscore, + skillPath: path.join(skillDirUnderscore, "SKILL.md"), + }, + { + name: "generate-command", + sourceDir: skillDirHyphen, + skillPath: path.join(skillDirHyphen, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const baseSkill = await fs.readFile(path.join(tempRoot, "skills", "generate-command", "SKILL.md"), "utf8") + expect(baseSkill).toContain('Run ce_subagent with agent="generate-command-2" and task="create command".') + }) + + test("rewritten synced skill prompt refs match emitted prompt filenames when command names collide", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-prompt-collision-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Docs skill", + "---", + "", + "# Docs skill", + "", + "Run /prompts:plan_review after this.", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "First review", + body: "First body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + { + name: "plan_review", + description: "Second review", + body: "Second body", + sourcePath: path.join(tempRoot, "commands", "plan_review.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, 
"skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/plan-review-2") + + const promptNames = (await fs.readdir(path.join(tempRoot, "prompts"))).sort() + expect(promptNames).toEqual(["plan-review-2.md", "plan-review.md"]) + }) + + test("writes compat extension when skills-only config has Task calls", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skills-only-compat-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const compatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + const compatContent = await fs.readFile(compatPath, "utf8") + expect(compatContent).toContain('name: "ce_subagent"') + }) + + test("writes the managed AGENTS block during sync publication", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-agents-block-")) + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const agentsPath = path.join(tempRoot, "AGENTS.md") + const agentsContent = await fs.readFile(agentsPath, "utf8") + expect(agentsContent).toContain("BEGIN COMPOUND PI TOOL MAP") + expect(agentsContent).toContain("ce_subagent") + expect(agentsContent).toContain("compound-engineering/mcporter.json (project sync)") + }) + + test("copies 
symlinked file assets when Pi sync materializes a skill", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-symlink-asset-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const sharedAssetPath = path.join(sourceSkillDir, "shared.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(sharedAssetPath, "shared asset\n") + await fs.symlink(sharedAssetPath, path.join(sourceSkillDir, "asset.txt")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const copiedAsset = await fs.readFile(path.join(tempRoot, "skills", "ce-plan", "asset.txt"), "utf8") + expect(copiedAsset).toBe("shared asset\n") + }) + + test("materializes top-level personal skills through trusted symlink entries without dropping in-boundary assets", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-entry-symlink-internal-asset-")) + const actualSkillsRoot = path.join(tempRoot, "actual-skills") + const linkedSkillsRoot = path.join(tempRoot, "linked-skills") + const externalSkillDir = path.join(tempRoot, "external-skill") + const sharedAssetPath = path.join(externalSkillDir, "shared.txt") + + await fs.mkdir(actualSkillsRoot, { recursive: true }) + await fs.mkdir(externalSkillDir, { recursive: true }) + await fs.writeFile(sharedAssetPath, "shared asset\n") + await fs.symlink(sharedAssetPath, path.join(externalSkillDir, "asset.txt")) + await fs.writeFile( + path.join(externalSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: External skill", + 
"---", + "", + "# Docs skill", + ].join("\n"), + ) + await fs.symlink(actualSkillsRoot, linkedSkillsRoot) + await fs.symlink(externalSkillDir, path.join(linkedSkillsRoot, "docs-skill")) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + entryDir: path.join(linkedSkillsRoot, "docs-skill"), + trustedRoot: linkedSkillsRoot, + trustedBoundary: externalSkillDir, + sourceDir: externalSkillDir, + skillPath: path.join(linkedSkillsRoot, "docs-skill", "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "asset.txt"), "utf8")).toBe("shared asset\n") + }) + + test("skips symlinked file assets that escape the skill root during Pi sync materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-escaped-symlink-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const externalAssetDir = path.join(tempRoot, "shared") + const externalAssetPath = path.join(externalAssetDir, "shared.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(externalAssetDir, { recursive: true }) + await fs.writeFile(externalAssetPath, "shared asset\n") + await fs.symlink(externalAssetPath, path.join(sourceSkillDir, "asset.txt")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + await expect(fs.access(path.join(tempRoot, "skills", "ce-plan", "asset.txt"))).rejects.toBeDefined() + }) + + test("resolves installed-plugin namespaced refs during Claude-home Pi sync", async () => { + const tempRoot = await 
fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-installed-plugin-refs-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedInstallNameMaps(tempRoot, { + agents: { + "compound-engineering:research:repo-research-analyst": "repo-research-analyst", + }, + skills: { + "compound-engineering:ce-plan": "ce-plan", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Uses installed plugin refs", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + "- /skill:compound-engineering:ce-plan", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + expect(syncedSkill).toContain("/skill:ce-plan") + }) + + test("dedupes personal sync names against installed managed Pi targets", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-installed-name-reservations-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedInstallNameMaps(tempRoot, { + agents: { + "compound-engineering:research:repo-research-analyst": "repo-research-analyst", + }, + skills: { + "compound-engineering:ce:plan": "ce-plan", + }, + prompts: { + "compound-engineering:plan-review": "plan-review", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: Local personal plan skill", + "---", + "", + "# Plan", + ].join("\n"), + ) + + 
const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce:plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "Local personal plan review", + body: "Local review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "skills", "ce-plan-2", "SKILL.md"), "utf8")).toContain("name: ce-plan-2") + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review-2.md"), "utf8")).toContain("Local review body") + await expect(fs.access(path.join(tempRoot, "skills", "ce-plan", "SKILL.md"))).rejects.toBeDefined() + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + }) + + test("preserves unknown qualified /skill refs during Claude-home Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-preserve-qualified-refs-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Keeps unresolved refs literal", + "---", + "", + "- /skill:unknown-plugin:ce-plan", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:unknown-plugin:ce-plan") + }) + + test("does not let unverified install state reserve sync names", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unverified-install-reservation-")) + const sourceSkillDir = 
path.join(tempRoot, "claude-skill") + const projectManagedDir = path.join(tempRoot, "compound-engineering") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(projectManagedDir, { recursive: true }) + await fs.writeFile( + path.join(projectManagedDir, "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan", + "claude-home:docs-skill": "docs-skill", + }, + prompts: { + "compound-engineering:plan-review": "plan-review", + "claude-home:plan-review": "plan-review", + }, + }, + }, + }, null, 2), + ) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Local skill should keep stable unsuffixed name", + "---", + "", + "# Docs skill", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "Local prompt should keep stable unsuffixed name", + body: "Local review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8")).toContain("name: docs-skill") + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain("Local review body") + await expect(fs.access(path.join(tempRoot, "skills", "docs-skill-2", "SKILL.md"))).rejects.toBeDefined() + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review-2.md"))).rejects.toBeDefined() + }) + + test("does not let unverified sync state reserve sync names", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unverified-sync-reservation-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const projectManagedDir = path.join(tempRoot, "compound-engineering") + 
+ await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(projectManagedDir, { recursive: true }) + await fs.writeFile( + path.join(projectManagedDir, "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + sync: { + nameMaps: { + skills: { + "claude-home:docs-skill": "docs-skill", + }, + prompts: { + "claude-home:plan-review": "plan-review", + }, + }, + }, + }, null, 2), + ) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Local skill should keep stable unsuffixed name", + "---", + "", + "# Docs skill", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "Local prompt should keep stable unsuffixed name", + body: "Local review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8")).toContain("name: docs-skill") + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain("Local review body") + await expect(fs.access(path.join(tempRoot, "skills", "docs-skill-2", "SKILL.md"))).rejects.toBeDefined() + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review-2.md"))).rejects.toBeDefined() + }) + + test("records trusted top-level personal skills as managed artifacts", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-managed-materialized-only-")) + const actualSkillsRoot = path.join(tempRoot, "actual-skills") + const linkedSkillsRoot = path.join(tempRoot, "linked-skills") + const externalSkillDir = path.join(tempRoot, "external-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(actualSkillsRoot, { recursive: true }) + await 
fs.mkdir(externalSkillDir, { recursive: true }) + await fs.writeFile( + path.join(externalSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: External skill", + "---", + "", + "# External skill", + ].join("\n"), + ) + await fs.symlink(actualSkillsRoot, linkedSkillsRoot) + await fs.symlink(externalSkillDir, path.join(linkedSkillsRoot, "docs-skill")) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + entryDir: path.join(linkedSkillsRoot, "docs-skill"), + trustedRoot: linkedSkillsRoot, + trustedBoundary: externalSkillDir, + sourceDir: externalSkillDir, + skillPath: path.join(linkedSkillsRoot, "docs-skill", "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.artifacts.some((artifact) => artifact.sourceName === "docs-skill" && artifact.kind === "synced-skill")).toBe(true) + expect(warnSpy).not.toHaveBeenCalled() + + warnSpy.mockRestore() + }) + + test("rejects unresolved first-party qualified prompt slash refs during Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-qualified-prompt-slash-reject-")) + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Contains unresolved qualified prompt ref", + body: "Run /prompts:compound-engineering:missing-prompt next.", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(resolvePiLayout(tempRoot, "sync").promptsDir, "plan-review.md"))).rejects.toBeDefined() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Unsupported unresolved first-party qualified ref for Pi sync: compound-engineering:missing-prompt")) + + warnSpy.mockRestore() + }) + + test("does not record skipped unsafe skills in 
sync-managed aliases or shared resources", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skipped-skill-state-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs/safe", + "description: Unsafe name should be skipped", + "---", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs/safe", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("missing") + expect(trust.state).toBeNull() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping skill with unsafe name")) + await expect(fs.access(path.join(tempRoot, "skills", "docs-safe", "SKILL.md"))).rejects.toBeDefined() + + warnSpy.mockRestore() + }) + + test("prunes stale skipped-skill aliases from prior sync state on rerun", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-prune-stale-skipped-state-")) + const layout = resolvePiLayout(tempRoot, "sync") + const stateHome = path.join(tempRoot, "state-home") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const seededState = replacePiManagedSection(null, "sync", createPiManagedSection({ + nameMaps: { + skills: { + "claude-home:docs/safe": "docs-safe", + }, + }, + artifacts: [createManagedArtifact(layout, "synced-skill", "docs/safe", "docs-safe")], + sharedResources: { compatExtension: true }, + }), "compound-engineering") + await writePiManagedState(layout, seededState, { install: false, sync: true }) + + const sourceSkillDir = path.join(tempRoot, "claude-skill") + await 
fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs/safe\n---\n") + + await syncToPi({ + skills: [ + { + name: "docs/safe", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("missing") + expect(trust.state).toBeNull() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping skill with unsafe name")) + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("rewrites claude-home qualified refs from verified global sync aliases when the nearest project manifest lacks sync-scoped state", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unverified-local-ce-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const globalPiRoot = path.join(fakeHome, ".pi", "agent") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedSyncNameMaps(globalPiRoot, { + skills: { + "claude-home:ce-plan": "ce-plan-global", + }, + }) + + const projectManagedDir = path.join(tempRoot, "compound-engineering") + await fs.mkdir(projectManagedDir, { recursive: true }) + await fs.writeFile( + path.join(projectManagedDir, "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-local", + }, + }, + }, + }, null, 2), + ) + + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Allows verified global claude-home refs when local sync state is absent", + "---", + "", + "- 
/skill:claude-home:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan-global") + expect(syncedSkill).not.toContain("/skill:claude-home-ce-plan") + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("rewrites compound-engineering qualified refs from verified global install aliases during Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-global-install-ref-rewrite-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const globalPiRoot = path.join(fakeHome, ".pi", "agent") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedInstallNameMaps(globalPiRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-global", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Keeps verified global CE refs visible", + "---", + "", + "- /skill:compound-engineering:ce-plan", + "- Task compound-engineering:ce-plan(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + 
expect(syncedSkill).toContain("/skill:ce-plan-global") + expect(syncedSkill).toContain('Run ce_subagent with agent="ce-plan-global" and task="feature_description".') + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("rewrites compound-engineering qualified refs from verified project-local install aliases during Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-local-install-ref-rewrite-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(tempRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-local-install", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Keeps verified local CE refs visible", + "---", + "", + "- /skill:compound-engineering:ce-plan", + "- Task compound-engineering:ce-plan(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan-local-install") + expect(syncedSkill).toContain('Run ce_subagent with agent="ce-plan-local-install" and task="feature_description".') + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("reserves verified global sync emitted names before allocating local Claude-home names", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-global-name-reservation-")) + const sourceSkillDir = path.join(tempRoot, 
"claude-skill") + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const globalPiRoot = path.join(fakeHome, ".pi", "agent") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedSyncNameMaps(globalPiRoot, { + skills: { + "claude-home:ce-plan": "ce-plan", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Keeps global names reserved", + "---", + "", + "- /skill:claude-home:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.lstat(path.join(tempRoot, "skills", "ce-plan-2"))).toBeDefined() + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan") + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("reserves verified global install emitted names before allocating local Pi names", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-global-install-name-reservation-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const globalPiRoot = path.join(fakeHome, ".pi", "agent") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await 
seedVerifiedInstallNameMaps(globalPiRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Keeps global install names reserved", + "---", + "", + "- /skill:compound-engineering:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.lstat(path.join(tempRoot, "skills", "ce-plan-2"))).toBeDefined() + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan") + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("reserves verified project-local install emitted names before allocating local Pi names", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-local-install-name-reservation-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(tempRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Keeps local install names reserved", + "---", + "", + "- /skill:compound-engineering:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "docs-skill", + 
sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.lstat(path.join(tempRoot, "skills", "ce-plan-2"))).toBeDefined() + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime prefers verified nested install aliases over verified direct-root legacy install aliases", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-nested-install-precedence-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-nested", + }, + }) + await seedVerifiedInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-direct-root", + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toBe("ce-plan-nested") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("sync and runtime fall back to an independently verified direct-root install layer when nested install is invalid", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-direct-root-install-fallback-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await 
fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-direct-root", + }, + }) + const nestedInstallLayout = resolvePiLayout(projectRoot, "install") + await fs.mkdir(path.dirname(nestedInstallLayout.managedManifestPath), { recursive: true }) + await fs.writeFile(nestedInstallLayout.managedManifestPath, "{ invalid json\n") + + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: falls back to verified direct-root install alias", + "---", + "", + "- /skill:compound-engineering:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, projectRoot) + + const syncedSkill = await fs.readFile(path.join(projectRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan-direct-root") + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toBe("ce-plan-direct-root") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("sync rewrites unqualified skill refs against verified install precedence before global sync aliases", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-install-over-global-sync-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = stateHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-local", + "ce-plan": "ce-plan-local", + }, + }) + await seedVerifiedGlobalSyncNameMaps(stateHome, { + skills: { 
+ "claude-home:ce-plan": "ce-plan-global", + "ce-plan": "ce-plan-global", + }, + }) + + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: prefers local install alias for unqualified refs", + "---", + "", + "- /skill:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, projectRoot) + + const syncedSkill = await fs.readFile(path.join(projectRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan-local") + expect(syncedSkill).not.toContain("/skill:ce-plan-global") + + delete process.env.COMPOUND_ENGINEERING_HOME + delete process.env.HOME + }) + + test("compat runtime resolves unqualified names by nearest verified precedence before raising conflicts", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-unqualified-precedence-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-local", + "ce-plan": "ce-plan-local", + }, + }) + await seedVerifiedInstallNameMaps(stateHome, { + skills: { + "compound-engineering:ce-plan": "ce-plan-global-install", + "ce-plan": "ce-plan-global-install", + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "ce-plan")).toBe("ce-plan-local") + + delete process.env.COMPOUND_ENGINEERING_HOME + delete process.env.HOME + }) + + test("compat runtime prefers project install aliases for unqualified names over same-root sync aliases", async () 
=> { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-same-root-unqualified-precedence-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-install", + "ce-plan": "ce-plan-install", + }, + }) + await seedVerifiedSyncNameMaps(projectRoot, { + skills: { + "claude-home:ce-plan": "ce-plan-sync", + "ce-plan": "ce-plan-sync", + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "ce-plan")).toBe("ce-plan-install") + + delete process.env.COMPOUND_ENGINEERING_HOME + delete process.env.HOME + }) + + test("compat runtime ignores repo-local sibling manifests as bundled fallback candidates", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-bundled-fallback-")) + const moduleRoot = path.join(tempRoot, "runtime-root") + const workspaceRoot = path.join(tempRoot, "workspace") + + await fs.mkdir(path.join(moduleRoot, "pi-resources", "compound-engineering"), { recursive: true }) + await fs.mkdir(path.join(moduleRoot, "compound-engineering"), { recursive: true }) + await fs.mkdir(workspaceRoot, { recursive: true }) + + await fs.writeFile( + path.join(moduleRoot, "pi-resources", "compound-engineering", "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-bundled", + }, + }, + }, + }, null, 2), + ) + await fs.writeFile( + path.join(moduleRoot, "compound-engineering", "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": 
"ce-plan-sibling", + }, + }, + }, + }, null, 2), + ) + + const { resolveAgentName } = await loadCompatHelpers(moduleRoot) + + expect(() => resolveAgentName(workspaceRoot, "compound-engineering:ce-plan")).toThrow("Unknown qualified subagent target") + expect(() => resolveAgentName(moduleRoot, "compound-engineering:ce-plan")).toThrow("Unknown qualified subagent target") + }) + + test("compat runtime discovers sync layout aliases and mcporter config from nested cwd", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-sync-layout-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedSyncNameMaps(projectRoot, { + skills: { + "claude-home:ce-plan": "ce-plan-sync", + }, + }, { + sharedResources: { mcporterConfig: true }, + }) + await fs.mkdir(path.join(projectRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile(path.join(projectRoot, "compound-engineering", "mcporter.json"), JSON.stringify({ mcpServers: {} }, null, 2)) + + const overridePath = path.join(tempRoot, "override-mcporter.json") + const { resolveAgentName, resolveMcporterConfigPath } = await loadCompatHelpers(projectRoot) + + expect(resolveAgentName(nestedCwd, "claude-home:ce-plan")).toBe("ce-plan-sync") + expect(resolveMcporterConfigPath(nestedCwd)).toBe(path.join(projectRoot, "compound-engineering", "mcporter.json")) + expect(resolveMcporterConfigPath(nestedCwd, overridePath)).toBe(path.join(projectRoot, "compound-engineering", "mcporter.json")) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime rejects cwd escapes outside the active workspace", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-cwd-boundary-")) + const stateHome = path.join(tempRoot, 
"state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + const externalRoot = path.join(tempRoot, "external") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(externalRoot, { recursive: true }) + + const linkPath = path.join(projectRoot, "linked-external") + await fs.symlink(externalRoot, linkPath) + + const { resolveTaskCwd } = await loadCompatHelpers(projectRoot) + + expect(resolveTaskCwd(projectRoot, "apps/docs")).toBe(path.join(projectRoot, "apps", "docs")) + expect(() => resolveTaskCwd(projectRoot, "../external")).toThrow("outside the active workspace") + expect(() => resolveTaskCwd(projectRoot, externalRoot)).toThrow("outside the active workspace") + expect(() => resolveTaskCwd(projectRoot, "~")).toThrow("outside the active workspace") + expect(() => resolveTaskCwd(projectRoot, "linked-external")).toThrow("outside the active workspace") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime anchors cwd checks to the authoritative workspace root, not the nested invocation dir", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-workspace-root-cwd-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + const siblingDir = path.join(projectRoot, "apps", "api") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(siblingDir, { recursive: true }) + await seedVerifiedSyncNameMaps(projectRoot, {}, { + sharedResources: { compatExtension: true }, + }) + + const { resolveTaskCwd } = await loadCompatHelpers(projectRoot) + expect(resolveTaskCwd(nestedCwd, "../api")).toBe(siblingDir) + expect(resolveTaskCwd(nestedCwd, siblingDir)).toBe(siblingDir) + + delete 
process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime allows same-workspace sibling cwd navigation without verified project manifests", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-workspace-fallback-")) + const nestedCwd = path.join(tempRoot, "apps", "docs") + const siblingDir = path.join(tempRoot, "apps", "api") + + await fs.mkdir(path.join(tempRoot, ".git"), { recursive: true }) + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(siblingDir, { recursive: true }) + + const { resolveTaskCwd } = await loadCompatHelpers(tempRoot) + expect(resolveTaskCwd(nestedCwd, "../api")).toBe(siblingDir) + expect(resolveTaskCwd(nestedCwd, siblingDir)).toBe(siblingDir) + }) + + test("compat runtime falls back to filesystem workspace detection when project trust is stale", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-stale-workspace-root-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + const siblingDir = path.join(projectRoot, "apps", "api") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(projectRoot, ".git"), { recursive: true }) + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(siblingDir, { recursive: true }) + await seedVerifiedSyncNameMaps(projectRoot, {}, { + sharedResources: { compatExtension: true }, + }) + + const layout = resolvePiLayout(projectRoot, "sync") + const verification = JSON.parse(await fs.readFile(layout.verificationPath, "utf8")) as any + verification.sync.hash = "stale:hash" + await fs.writeFile(layout.verificationPath, JSON.stringify(verification, null, 2)) + + const { resolveTaskCwd } = await loadCompatHelpers(projectRoot) + expect(resolveTaskCwd(nestedCwd, "../api")).toBe(siblingDir) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_subagent rejects 
invalid parallel cwd values before launching any tasks", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-cwd-preflight-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + await fs.mkdir(projectRoot, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:repo-research-analyst": "repo-research-analyst", + }, + }) + + const mod = await loadCompatHelpers(projectRoot) + const tools = new Map any }>() + let execCalls = 0 + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + execCalls += 1 + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const subagent = tools.get("ce_subagent") + expect(subagent).toBeDefined() + + const result = await subagent!.execute( + "tool-call-id", + { + tasks: [ + { agent: "compound-engineering:repo-research-analyst", task: "safe", cwd: "." }, + { agent: "compound-engineering:repo-research-analyst", task: "unsafe", cwd: "../external" }, + ], + }, + undefined, + undefined, + { cwd: projectRoot }, + ) + + expect(result.isError).toBe(true) + expect(String(result.content?.[0]?.text ?? 
"")).toContain("outside the active workspace") + expect(execCalls).toBe(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_list_capabilities exposes the current verified runtime capability set", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-capability-discovery-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan", + }, + skills: { + "compound-engineering:repo-research-analyst": "repo-research-analyst", + }, + }) + await seedVerifiedSyncNameMaps(projectRoot, { + prompts: { + "claude-home:plan-review": "plan-review", + }, + }) + + const mod = await loadCompatHelpers(projectRoot) + const tools = new Map any }>() + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const capabilities = tools.get("ce_list_capabilities") + expect(capabilities).toBeDefined() + + const result = await capabilities!.execute("tool-call-id", {}, undefined, undefined, { cwd: nestedCwd }) + expect(result.isError).toBeUndefined() + const details = result.details as { + install: { agents: string[]; skills: string[]; prompts: string[] } + sync: { agents: string[]; skills: string[]; prompts: string[] } + unqualified: { agents: string[]; skills: string[]; prompts: string[] } + shared: { mcporter: { available: boolean; source: string | null; servers: string[] } } + } + expect(details.install.agents).toContain("compound-engineering:ce:plan") + expect(details.install.skills).toContain("compound-engineering:repo-research-analyst") + expect(details.sync.prompts).toContain("claude-home:plan-review") + 
expect(details.unqualified.agents).toContain("compound-engineering:ce:plan") + expect(details.shared.mcporter).toMatchObject({ available: false, source: null, servers: [] }) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_run_prompt executes a verified prompt alias inside the active workspace", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-run-prompt-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedSyncNameMaps(projectRoot, { + prompts: { + "claude-home:plan-review": "plan-review", + }, + }) + + const mod = await loadCompatHelpers(projectRoot) + const tools = new Map any }>() + const execCalls: Array<{ command: string; args: string[] }> = [] + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec(command: string, args: string[]) { + execCalls.push({ command, args }) + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const runPrompt = tools.get("ce_run_prompt") + expect(runPrompt).toBeDefined() + + const result = await runPrompt!.execute( + "tool-call-id", + { prompt: "claude-home:plan-review", args: "now" }, + undefined, + undefined, + { cwd: nestedCwd }, + ) + + expect(result.isError).toBe(false) + expect(execCalls).toHaveLength(1) + expect(execCalls[0]?.command).toBe("bash") + expect(execCalls[0]?.args.join(" ")).toContain("pi --no-session -p") + expect(execCalls[0]?.args.join(" ")).toContain("/plan-review now") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_run_prompt resolves alias manifest signatures once per execution path when cwd is unchanged", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-run-prompt-signatures-")) + const stateHome = path.join(tempRoot, "state-home") 
+ const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedSyncNameMaps(projectRoot, { + prompts: { + "claude-home:plan-review": "plan-review", + }, + }) + + const mod = await loadCompatHelpers(projectRoot) + const tools = new Map any }>() + const signaturePaths: string[] = [] + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + mod.setAliasManifestSignatureHookForTests((filePath) => { + signaturePaths.push(filePath) + }) + + const runPrompt = tools.get("ce_run_prompt") + expect(runPrompt).toBeDefined() + + const result = await runPrompt!.execute( + "tool-call-id", + { prompt: "claude-home:plan-review" }, + undefined, + undefined, + { cwd: nestedCwd }, + ) + + expect(result.isError).toBe(false) + expect(signaturePaths).toHaveLength(7) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_run_prompt rejects unknown qualified prompt targets", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-run-prompt-reject-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const mod = await loadCompatHelpers(tempRoot) + const tools = new Map any }>() + let execCalls = 0 + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + execCalls += 1 + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const runPrompt = tools.get("ce_run_prompt") + expect(runPrompt).toBeDefined() + + const result = await runPrompt!.execute( + "tool-call-id", + { prompt: "unknown-plugin:plan-review" }, + undefined, + undefined, + { cwd: tempRoot }, + ) + + expect(result.isError).toBe(true) + expect(String(result.content?.[0]?.text ?? 
"")).toContain("Unknown qualified prompt target") + expect(execCalls).toBe(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_run_prompt rejects unmanaged unqualified prompt targets", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-run-prompt-unqualified-reject-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const mod = await loadCompatHelpers(tempRoot) + const tools = new Map any }>() + let execCalls = 0 + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + execCalls += 1 + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const runPrompt = tools.get("ce_run_prompt") + expect(runPrompt).toBeDefined() + + const result = await runPrompt!.execute( + "tool-call-id", + { prompt: "ambient-prompt" }, + undefined, + undefined, + { cwd: tempRoot }, + ) + + expect(result.isError).toBe(true) + expect(String(result.content?.[0]?.text ?? 
"")).toContain("Unknown prompt target") + expect(execCalls).toBe(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("ce_subagent rejects unmanaged unqualified agent targets", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-subagent-unqualified-reject-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const mod = await loadCompatHelpers(tempRoot) + const tools = new Map any }>() + let execCalls = 0 + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + execCalls += 1 + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const subagent = tools.get("ce_subagent") + expect(subagent).toBeDefined() + + const result = await subagent!.execute( + "tool-call-id", + { agent: "ambient-agent", task: "do work" }, + undefined, + undefined, + { cwd: tempRoot }, + ) + + expect(result.isError).toBe(true) + expect(String(result.content?.[0]?.text ?? "")).toContain("Unknown subagent target") + expect(execCalls).toBe(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("persists the current Pi policy fingerprint in sync managed state", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-policy-fingerprint-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await syncToPi({ + commands: [ + { + name: "plan-review", + description: "Writes sync state", + body: "Body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + skills: [], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.policyFingerprint).toBe(getPiPolicyFingerprint()) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("treats sync managed state as stale when the policy fingerprint changes", async () => { + const tempRoot = await 
fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-policy-fingerprint-stale-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + setPiPolicyFingerprintForTests("policy-v1") + await syncToPi({ + commands: [ + { + name: "plan-review", + description: "Writes sync state", + body: "Body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + skills: [], + mcpServers: {}, + }, tempRoot) + + setPiPolicyFingerprintForTests("policy-v2") + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("stale") + expect(trust.verifiedSections.sync).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime stops trusting aliases after only policy trust inputs change", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-policy-stale-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + setPiPolicyFingerprintForTests("policy-v1") + await seedVerifiedSyncNameMaps(projectRoot, { + skills: { + "claude-home:ce-plan": "ce-plan-sync", + }, + }) + + process.env.COMPOUND_ENGINEERING_PI_POLICY_FINGERPRINT = "policy-v1" + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "claude-home:ce-plan")).toBe("ce-plan-sync") + + process.env.COMPOUND_ENGINEERING_PI_POLICY_FINGERPRINT = "policy-v2" + expect(() => resolveAgentName(nestedCwd, "claude-home:ce-plan")).toThrow("Unknown qualified subagent target") + + delete process.env.COMPOUND_ENGINEERING_PI_POLICY_FINGERPRINT + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("runtime alias resolution reuses one ancestor-walk path set per lookup", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 
"sync-pi-runtime-alias-walk-proof-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan", + }, + }) + + const helpers = await loadCompatHelpers(projectRoot) + let signatureCalls = 0 + helpers.setAliasManifestSignatureHookForTests(() => { + signatureCalls += 1 + }) + + expect(helpers.resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan") + expect(signatureCalls).toBeLessThanOrEqual(8) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("mcporter_list ignores stale direct callers that still send configPath", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-configpath-direct-caller-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + await fs.mkdir(tempRoot, { recursive: true }) + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + const mod = await loadCompatHelpers(tempRoot) + const tools = new Map any }>() + let execCalls = 0 + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + execCalls += 1 + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const mcporterList = tools.get("mcporter_list") + expect(mcporterList).toBeDefined() + + const result = await mcporterList!.execute( + "tool-call-id", + { server: "context7", configPath: path.join(tempRoot, "override.json") }, + undefined, + undefined, + { cwd: tempRoot }, + ) + expect(result.isError).toBe(false) + expect(execCalls).toBe(1) + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("configPath is deprecated and ignored")) + + warnSpy.mockRestore() + delete 
process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime blocks global mcporter fallback when a nearer project config is unverified", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-unverified-mcp-fallback-")) + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(path.join(projectRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile( + path.join(projectRoot, "compound-engineering", "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + sync: { + sharedResources: { mcporterConfig: true }, + }, + }, null, 2), + ) + await fs.writeFile(path.join(projectRoot, "compound-engineering", "mcporter.json"), JSON.stringify({ mcpServers: { local: {} } }, null, 2)) + + const globalRoot = path.join(fakeHome, ".pi", "agent") + const globalLayout = resolvePiLayout(globalRoot, "sync") + await fs.mkdir(path.dirname(globalLayout.mcporterConfigPath), { recursive: true }) + await fs.writeFile(globalLayout.mcporterConfigPath, JSON.stringify({ mcpServers: { global: {} } }, null, 2)) + await writePiManagedState( + globalLayout, + replacePiManagedSection(null, "sync", createPiManagedSection({ + nameMaps: {}, + sharedResources: { mcporterConfig: true }, + }), "compound-engineering"), + { install: false, sync: true }, + ) + + const { resolveMcporterConfigPath } = await loadCompatHelpers(projectRoot) + expect(resolveMcporterConfigPath(nestedCwd)).toBeUndefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("compat runtime resolves 
install and sync namespaces from different project layouts at the same root", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-dual-layout-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-install", + }, + }) + await seedVerifiedSyncNameMaps(projectRoot, { + skills: { + "claude-home:ce-plan": "ce-plan-sync", + }, + }, { + sharedResources: { mcporterConfig: true }, + }) + await fs.mkdir(path.join(projectRoot, ".pi", "compound-engineering"), { recursive: true }) + await fs.writeFile(path.join(projectRoot, ".pi", "compound-engineering", "mcporter.json"), JSON.stringify({ mcpServers: { install: {} } }, null, 2)) + await fs.mkdir(path.join(projectRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile(path.join(projectRoot, "compound-engineering", "mcporter.json"), JSON.stringify({ mcpServers: { sync: {} } }, null, 2)) + + const { resolveAgentName, resolveMcporterConfigPath } = await loadCompatHelpers(projectRoot) + + expect(resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toBe("ce-plan-install") + expect(resolveAgentName(nestedCwd, "claude-home:ce-plan")).toBe("ce-plan-sync") + expect(resolveMcporterConfigPath(nestedCwd)).toBe(path.join(projectRoot, "compound-engineering", "mcporter.json")) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime resolves verified legacy top-level name maps for both namespaces", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-legacy-top-level-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = 
path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + + const installLayout = resolvePiLayout(projectRoot, "install") + const syncLayout = resolvePiLayout(projectRoot, "sync") + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-legacy", + }, + }) + await seedVerifiedSyncNameMaps(projectRoot, { + skills: { + "claude-home:ce-plan": "ce-plan-sync-legacy", + }, + }) + + await fs.writeFile( + installLayout.managedManifestPath, + JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + policyFingerprint: getPiPolicyFingerprint(), + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-legacy", + }, + }, + }, null, 2), + ) + await fs.writeFile( + syncLayout.managedManifestPath, + JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + policyFingerprint: getPiPolicyFingerprint(), + nameMaps: { + skills: { + "claude-home:ce-plan": "ce-plan-sync-legacy", + }, + }, + }, null, 2), + ) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toBe("ce-plan-legacy") + expect(resolveAgentName(nestedCwd, "claude-home:ce-plan")).toBe("ce-plan-sync-legacy") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime trusts verified legacy install generatedSkills arrays", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-legacy-generated-skills-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + const installLayout = resolvePiLayout(projectRoot, "install") + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + 
"compound-engineering:ce-plan": "ce-plan-legacy", + }, + }) + + await rewriteManifestWithMatchingVerification( + installLayout, + "install", + (manifest) => { + manifest.generatedSkills = [{ sourceName: "compound-engineering:ce-plan", outputPath: path.join(installLayout.skillsDir, "ce-plan-legacy") }] + return manifest + }, + createPiManagedSection({ + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-legacy", + }, + }, + artifacts: [createManagedArtifact(installLayout, "generated-skill", "compound-engineering:ce-plan", "ce-plan-legacy")], + }), + ) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toBe("ce-plan-legacy") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime trusts verified legacy sync prompt arrays", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-legacy-sync-prompts-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + const syncLayout = resolvePiLayout(projectRoot, "sync") + await seedVerifiedSyncNameMaps(projectRoot, { + skills: { + "claude-home:plan-review": "plan-review-legacy", + }, + }) + + await rewriteManifestWithMatchingVerification( + syncLayout, + "sync", + (manifest) => { + manifest.syncPrompts = [{ sourceName: "claude-home:plan-review", outputPath: path.join(syncLayout.promptsDir, "plan-review-legacy.md") }] + return manifest + }, + createPiManagedSection({ + nameMaps: { + skills: { + "claude-home:plan-review": "plan-review-legacy", + }, + }, + artifacts: [createManagedArtifact(syncLayout, "prompt", "claude-home:plan-review", "plan-review-legacy")], + }), + ) + + const trust = await loadCompatHelpers(projectRoot) + 
expect(trust.resolveAgentName(nestedCwd, "claude-home:plan-review")).toBe("plan-review-legacy") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime keeps scoped name maps authoritative over legacy top-level maps", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-scoped-over-legacy-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + + const layout = resolvePiLayout(projectRoot, "sync") + await writePiManagedState( + layout, + { + version: 1, + pluginName: "compound-engineering", + install: createPiManagedSection({ + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-install", + }, + }, + }), + sync: createPiManagedSection({ + nameMaps: { + skills: { + "claude-home:ce-plan": "ce-plan-sync", + }, + }, + }), + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-install", + "claude-home:ce-plan": "ce-plan-sync", + }, + }, + }, + { install: true, sync: true }, + ) + + const manifest = JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8")) as { + version: number + pluginName?: string + nameMaps?: { skills?: Record<string, string> } + install?: { nameMaps?: { skills?: Record<string, string> } } + sync?: { nameMaps?: { skills?: Record<string, string> } } + } + manifest.nameMaps = { + skills: { + "compound-engineering:ce-plan": "ce-plan-legacy", + "claude-home:ce-plan": "ce-plan-sync-legacy", + }, + } + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2)) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toBe("ce-plan-install") + expect(resolveAgentName(nestedCwd, "claude-home:ce-plan")).toBe("ce-plan-sync") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + 
test("runtime and on-disk state agree across install, sync, and nested cwd lookup for canonical custom-root layouts", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-custom-root-contract-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + const installRoot = path.join(projectRoot, "custom-install-root") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-install", + }, + }) + await fs.mkdir(path.join(projectRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile( + path.join(projectRoot, "compound-engineering", "mcporter.json"), + JSON.stringify({ mcpServers: { sync: {} } }, null, 2), + ) + + await syncToPi({ + skills: [ + { + name: "ce-plan", + sourceDir: path.join(import.meta.dir, "fixtures", "sample-plugin", "skills", "skill-one"), + skillPath: path.join(import.meta.dir, "fixtures", "sample-plugin", "skills", "skill-one", "SKILL.md"), + }, + ], + mcpServers: { + sync: { command: "echo" }, + }, + }, projectRoot) + + await writePiBundle(installRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "workflows-review", content: "Review content" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-install-root", + }, + }, + }) + + const installLayout = resolvePiLayout(installRoot, "install") + expect(await fs.readFile(path.join(installLayout.promptsDir, "workflows-review.md"), "utf8")).toContain("Review content") + expect(await fs.readFile(path.join(projectRoot, "skills", "ce-plan", "SKILL.md"), "utf8")).toContain("Sample skill") + + const { resolveAgentName, resolveMcporterConfigPath } = await loadCompatHelpers(projectRoot) + 
expect(resolveAgentName(nestedCwd, "claude-home:ce-plan")).toBe("ce-plan") + expect(resolveMcporterConfigPath(nestedCwd)).toBe(path.join(projectRoot, "compound-engineering", "mcporter.json")) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime refreshes alias routing after a same-process manifest update", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-refresh-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan", + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + + expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan") + + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan-2", + }, + }) + + expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan-2") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime recovers after a transient manifest parse failure in the same process", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-parse-recovery-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan", + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + const layout = resolvePiLayout(projectRoot, "install") + + 
expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan") + + await fs.writeFile(layout.managedManifestPath, "{ invalid json\n") + expect(() => resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toThrow("Unknown qualified subagent target") + + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan-fixed", + }, + }) + + expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan-fixed") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime fails closed for an unverified nearest project install manifest", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-unverified-install-fallback-")) + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(path.join(projectRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile( + path.join(projectRoot, "compound-engineering", "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-local", + }, + }, + sharedResources: { + compatExtension: true, + }, + }, + }, null, 2), + ) + + await seedVerifiedInstallNameMaps(path.join(fakeHome, ".pi", "agent"), { + skills: { + "compound-engineering:ce-plan": "ce-plan-global", + }, + }) + + const globalLayout = resolvePiLayout(path.join(fakeHome, ".pi", "agent"), "sync") + await fs.mkdir(globalLayout.extensionsDir, { recursive: true }) + await fs.writeFile(path.join(globalLayout.extensionsDir, "compound-engineering-compat.ts"), PI_COMPAT_EXTENSION_SOURCE) + + const { 
resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(() => resolveAgentName(nestedCwd, "compound-engineering:ce-plan")).toThrow("Unknown qualified subagent target") + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("sync does not rewrite against global install aliases when an unverified nearest project install manifest blocks fallback", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-blocked-global-install-rewrite-")) + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = path.join(tempRoot, "home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(path.join(tempRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile( + path.join(tempRoot, "compound-engineering", "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": "ce-plan-local", + }, + }, + }, + }, null, 2), + ) + await seedVerifiedInstallNameMaps(path.join(fakeHome, ".pi", "agent"), { + skills: { + "compound-engineering:ce-plan": "ce-plan-global", + }, + }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: runtime-blocked global fallback must not rewrite", + "---", + "", + "- /skill:compound-engineering:ce-plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + 
expect(syncedSkill).not.toContain("/skill:ce-plan-global") + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("compat runtime refreshes alias trust after verification removal without manifest changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-verification-refresh-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await seedVerifiedProjectInstallNameMaps(projectRoot, { + agents: { + "compound-engineering:ce:plan": "ce-plan", + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + const layout = resolvePiLayout(projectRoot, "install") + + expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan") + + await fs.unlink(layout.verificationPath) + expect(() => resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toThrow("Unknown qualified subagent target") + + await writePiManagedState( + layout, + replacePiManagedSection(null, "install", createPiManagedSection({ + nameMaps: { + agents: { + "compound-engineering:ce:plan": "ce-plan-restored", + }, + }, + }), "compound-engineering"), + { install: true, sync: false }, + ) + + expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan-restored") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("compat runtime treats trailing-separator install roots as the same canonical trusted root", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-runtime-trailing-root-")) + const stateHome = path.join(tempRoot, "state-home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, 
"nested", "cwd") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await writePiBundle(projectRoot + path.sep, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [], + generatedSkills: [ + { + name: "ce-plan", + sourceName: "compound-engineering:ce:plan", + content: "---\nname: ce-plan\ndescription: Plan\n---\n", + }, + ], + extensions: [], + nameMaps: { + agents: { + "compound-engineering:ce:plan": "ce-plan", + }, + }, + }) + + const { resolveAgentName } = await loadCompatHelpers(projectRoot) + expect(resolveAgentName(nestedCwd, "compound-engineering:ce:plan")).toBe("ce-plan") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not collapse unknown qualified refs to local same-leaf targets during Claude-home Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-qualified-shadowing-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const localSkillDir = path.join(tempRoot, "local-plan") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(localSkillDir, { recursive: true }) + await fs.writeFile(path.join(localSkillDir, "SKILL.md"), "---\nname: ce-plan\n---\n") + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Avoids local shadowing", + "---", + "", + "- /skill:unknown-plugin:ce-plan", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "ce-plan", + sourceDir: localSkillDir, + skillPath: path.join(localSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:unknown-plugin:ce-plan") + 
expect(syncedSkill).not.toContain("/skill:ce-plan") + }) + + test("skips only the offending skill when foreign qualified Task refs are unsupported during Pi sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-qualified-task-shadowing-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const localSkillDir = path.join(tempRoot, "local-agent") + const validSkillDir = path.join(tempRoot, "valid-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(localSkillDir, { recursive: true }) + await fs.mkdir(validSkillDir, { recursive: true }) + await fs.writeFile(path.join(localSkillDir, "SKILL.md"), "---\nname: some-missing-agent\ndescription: Local shadow\n---\n") + await fs.writeFile(path.join(validSkillDir, "SKILL.md"), "---\nname: valid-skill\ndescription: Valid\n---\n\nBody\n") + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Avoids task shadowing", + "---", + "", + "- Task unknown-plugin:review:some-missing-agent(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "some-missing-agent", + sourceDir: localSkillDir, + skillPath: path.join(localSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi({ + ...config, + skills: [ + ...config.skills, + { + name: "valid-skill", + sourceDir: validSkillDir, + skillPath: path.join(validSkillDir, "SKILL.md"), + }, + ], + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"))).rejects.toBeDefined() + expect(await fs.readFile(path.join(tempRoot, "skills", "valid-skill", "SKILL.md"), "utf8")).toContain("Body") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync 
skill docs-skill")) + + warnSpy.mockRestore() + }) + + test("re-renders sibling skill refs against the final published alias set after unsupported siblings drop out", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-sibling-publication-atomicity-")) + const docsSkillDir = path.join(tempRoot, "docs-skill") + const badSkillDir = path.join(tempRoot, "bad-skill") + const goodSkillDir = path.join(tempRoot, "good-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(docsSkillDir, { recursive: true }) + await fs.mkdir(badSkillDir, { recursive: true }) + await fs.mkdir(goodSkillDir, { recursive: true }) + await fs.writeFile( + path.join(docsSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: depends on bad sibling publish", + "---", + "", + "- /skill:claude-home:bad-skill", + ].join("\n"), + ) + await fs.writeFile( + path.join(badSkillDir, "SKILL.md"), + [ + "---", + "name: bad-skill", + "description: unsupported sibling", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + await fs.writeFile(path.join(goodSkillDir, "SKILL.md"), "---\nname: good-skill\n---\n\nBody\n") + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: docsSkillDir, + skillPath: path.join(docsSkillDir, "SKILL.md"), + }, + { + name: "bad-skill", + sourceDir: badSkillDir, + skillPath: path.join(badSkillDir, "SKILL.md"), + }, + { + name: "good-skill", + sourceDir: goodSkillDir, + skillPath: path.join(goodSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "skills", "bad-skill", "SKILL.md"))).rejects.toBeDefined() + const docsSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(await fs.readFile(path.join(tempRoot, "skills", "good-skill", "SKILL.md"), "utf8")).toContain("Body") + expect(docsSkill).not.toContain("/skill:bad-skill") + 
expect(docsSkill).toContain("/skill:claude-home:bad-skill") + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.skills["good-skill"]).toBe("good-skill") + expect(trust.state?.sync.nameMaps.skills["bad-skill"]).toBeUndefined() + expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBe("docs-skill") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill bad-skill")) + + warnSpy.mockRestore() + }) + + test("retries a first-pass blocked skill after a colliding sibling drops out", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-retry-skill-after-shrink-")) + const retryableSkillDir = path.join(tempRoot, "retryable-skill") + const blockingSkillDir = path.join(tempRoot, "blocking-skill") + const validSkillDir = path.join(tempRoot, "valid-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(retryableSkillDir, { recursive: true }) + await fs.mkdir(blockingSkillDir, { recursive: true }) + await fs.mkdir(validSkillDir, { recursive: true }) + await fs.writeFile( + path.join(retryableSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: retry after sibling shrink", + "---", + "", + "- Task compound-engineering:review:bad(feature_description)", + ].join("\n"), + ) + await fs.writeFile( + path.join(blockingSkillDir, "SKILL.md"), + [ + "---", + "name: bad", + "description: blocks first pass then drops out", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + await fs.writeFile(path.join(validSkillDir, "SKILL.md"), "---\nname: valid-skill\n---\n\nBody\n") + + await syncToPi({ + skills: [ + { name: "docs-skill", sourceDir: retryableSkillDir, skillPath: path.join(retryableSkillDir, "SKILL.md") }, + { name: "bad", sourceDir: blockingSkillDir, skillPath: path.join(blockingSkillDir, "SKILL.md") }, + { name: "valid-skill", 
sourceDir: validSkillDir, skillPath: path.join(validSkillDir, "SKILL.md") }, + ], + mcpServers: {}, + }, tempRoot) + + const docsSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(docsSkill).toContain('Run ce_subagent with agent="bad" and task="feature_description".') + await expect(fs.access(path.join(tempRoot, "skills", "bad", "SKILL.md"))).rejects.toBeDefined() + expect(await fs.readFile(path.join(tempRoot, "skills", "valid-skill", "SKILL.md"), "utf8")).toContain("Body") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill bad")) + expect(warnSpy).not.toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill docs-skill")) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBe("docs-skill") + expect(trust.state?.sync.nameMaps.skills.bad).toBeUndefined() + + warnSpy.mockRestore() + }) + + test("retries a first-pass blocked prompt after a colliding sibling skill drops out", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-retry-prompt-after-shrink-")) + const blockingSkillDir = path.join(tempRoot, "blocking-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(blockingSkillDir, { recursive: true }) + await fs.writeFile( + path.join(blockingSkillDir, "SKILL.md"), + [ + "---", + "name: bad", + "description: blocks prompt first pass then drops out", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { name: "bad", sourceDir: blockingSkillDir, skillPath: path.join(blockingSkillDir, "SKILL.md") }, + ], + commands: [ + { + name: "plan-review", + description: "Prompt retries after sibling shrink", + body: "- Task compound-engineering:review:bad(feature_description)", + sourcePath: path.join(tempRoot, "commands", 
"plan-review.md"), + }, + { + name: "safe-review", + description: "Still publishes", + body: "Body", + sourcePath: path.join(tempRoot, "commands", "safe-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain('Run ce_subagent with agent="bad" and task="feature_description".') + expect(await fs.readFile(path.join(tempRoot, "prompts", "safe-review.md"), "utf8")).toContain("Body") + await expect(fs.access(path.join(tempRoot, "skills", "bad", "SKILL.md"))).rejects.toBeDefined() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill bad")) + expect(warnSpy).not.toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync command plan-review")) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBe("plan-review") + + warnSpy.mockRestore() + }) + + test("rewrites same-run claude-home qualified sibling refs for both synced commands and skills", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-same-run-qualified-sibling-")) + const docsSkillDir = path.join(tempRoot, "docs-skill") + const planSkillDir = path.join(tempRoot, "plan-skill") + + await fs.mkdir(docsSkillDir, { recursive: true }) + await fs.mkdir(planSkillDir, { recursive: true }) + await fs.writeFile( + path.join(docsSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: rewrites same-run qualified refs", + "---", + "", + "- /skill:claude-home:ce:plan", + "- Task claude-home:ce:plan(feature_description)", + ].join("\n"), + ) + await fs.writeFile(path.join(planSkillDir, "SKILL.md"), "---\nname: ce:plan\n---\n\nBody\n") + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: docsSkillDir, + skillPath: path.join(docsSkillDir, "SKILL.md"), + }, + { + name: "ce:plan", + sourceDir: planSkillDir, + 
skillPath: path.join(planSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "rewrites same-run qualified task refs", + body: "- Task claude-home:ce:plan(feature_description)", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + const syncedPrompt = await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8") + + expect(syncedSkill).toContain("/skill:ce-plan") + expect(syncedSkill).not.toContain("/skill:claude-home-ce-plan") + expect(syncedSkill).toContain('Run ce_subagent with agent="ce-plan" and task="feature_description".') + expect(syncedPrompt).toContain('Run ce_subagent with agent="ce-plan" and task="feature_description".') + }) + + test("narrows second-pass sync work to retryable artifacts only", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-narrow-rerun-")) + const blockingSkillDir = path.join(tempRoot, "blocking-skill") + const stableSkillDir = path.join(tempRoot, "stable-skill") + const passPayloads: Array<{ passNumber: number; activeCommandNames: string[]; activeSkillNames: string[] }> = [] + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(blockingSkillDir, { recursive: true }) + await fs.mkdir(stableSkillDir, { recursive: true }) + await fs.writeFile( + path.join(blockingSkillDir, "SKILL.md"), + [ + "---", + "name: bad", + "description: blocks prompt first pass then drops out", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + await fs.writeFile(path.join(stableSkillDir, "SKILL.md"), "---\nname: stable-skill\n---\n\nStable\n") + + setPiSyncPassHookForTests((payload) => { + passPayloads.push(payload) + }) + + await syncToPi({ + skills: [ + { + name: "bad", + sourceDir: blockingSkillDir, + skillPath: 
path.join(blockingSkillDir, "SKILL.md"), + }, + { + name: "stable-skill", + sourceDir: stableSkillDir, + skillPath: path.join(stableSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "Prompt retries after sibling shrink", + body: "- Task compound-engineering:review:bad(feature_description)", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + { + name: "safe-review", + description: "Still publishes", + body: "Body", + sourcePath: path.join(tempRoot, "commands", "safe-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(passPayloads).toHaveLength(2) + expect(passPayloads[0]).toEqual({ + passNumber: 1, + activeCommandNames: ["plan-review", "safe-review"], + activeSkillNames: ["bad", "stable-skill"], + }) + expect(passPayloads[1]).toEqual({ + passNumber: 2, + activeCommandNames: ["plan-review"], + activeSkillNames: [], + }) + + warnSpy.mockRestore() + }) + + test("batches Pi prompt conversion for multi-command sync when prompts are all convertible", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-command-batch-convert-")) + let conversionCalls = 0 + setPiSyncCommandConversionHookForTests(() => { + conversionCalls += 1 + }) + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "First prompt", + body: "Body one", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + { + name: "safe-review", + description: "Second prompt", + body: "Body two", + sourcePath: path.join(tempRoot, "commands", "safe-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(conversionCalls).toBe(1) + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain("Body one") + expect(await fs.readFile(path.join(tempRoot, "prompts", "safe-review.md"), "utf8")).toContain("Body two") + }) + + test("narrowed reruns preserve the same final sync outputs as the canonical full rerun", async () => { + const 
narrowRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-rerun-parity-narrow-")) + const fullRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-rerun-parity-full-")) + + for (const root of [narrowRoot, fullRoot]) { + const blockingSkillDir = path.join(root, "blocking-skill") + const retryableSkillDir = path.join(root, "retryable-skill") + const stableSkillDir = path.join(root, "stable-skill") + await fs.mkdir(blockingSkillDir, { recursive: true }) + await fs.mkdir(retryableSkillDir, { recursive: true }) + await fs.mkdir(stableSkillDir, { recursive: true }) + await fs.writeFile( + path.join(blockingSkillDir, "SKILL.md"), + [ + "---", + "name: bad", + "description: blocks first pass then drops out", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + await fs.writeFile( + path.join(retryableSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: retry after sibling shrink", + "---", + "", + "- Task compound-engineering:review:bad(feature_description)", + ].join("\n"), + ) + await fs.writeFile(path.join(stableSkillDir, "SKILL.md"), "---\nname: stable-skill\n---\n\nStable\n") + } + + const buildConfig = (root: string): ClaudeHomeConfig => ({ + skills: [ + { + name: "docs-skill", + sourceDir: path.join(root, "retryable-skill"), + skillPath: path.join(root, "retryable-skill", "SKILL.md"), + }, + { + name: "bad", + sourceDir: path.join(root, "blocking-skill"), + skillPath: path.join(root, "blocking-skill", "SKILL.md"), + }, + { + name: "stable-skill", + sourceDir: path.join(root, "stable-skill"), + skillPath: path.join(root, "stable-skill", "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "Prompt retries after sibling shrink", + body: "- Task compound-engineering:review:bad(feature_description)", + sourcePath: path.join(root, "commands", "plan-review.md"), + }, + { + name: "safe-review", + description: "Still publishes", + body: "Body", + sourcePath: path.join(root, 
"commands", "safe-review.md"), + }, + ], + mcpServers: {}, + }) + + const narrowWarnings: string[] = [] + let warnSpy = spyOn(console, "warn").mockImplementation((message: string) => { + narrowWarnings.push(message) + }) + + await syncToPi(buildConfig(narrowRoot), narrowRoot) + warnSpy.mockRestore() + + const fullWarnings: string[] = [] + warnSpy = spyOn(console, "warn").mockImplementation((message: string) => { + fullWarnings.push(message) + }) + setPiSyncRerunModeForTests("full") + await syncToPi(buildConfig(fullRoot), fullRoot) + warnSpy.mockRestore() + + const [narrowTree, fullTree] = await Promise.all([ + readTreeSnapshot(narrowRoot), + readTreeSnapshot(fullRoot), + ]) + const [narrowTrust, fullTrust] = await Promise.all([ + loadPiManagedStateWithTrust(resolvePiLayout(narrowRoot, "sync")), + loadPiManagedStateWithTrust(resolvePiLayout(fullRoot, "sync")), + ]) + + expect(normalizeRootPaths(narrowTree, narrowRoot)).toEqual(normalizeRootPaths(fullTree, fullRoot)) + expect(normalizeRootPaths(narrowWarnings, narrowRoot)).toEqual(normalizeRootPaths(fullWarnings, fullRoot)) + expect(normalizeRootPaths(narrowTrust.state?.sync, narrowRoot)).toEqual(normalizeRootPaths(fullTrust.state?.sync, fullRoot)) + }) + + test("skips unresolved first-party qualified Task refs instead of retargeting to same-leaf local aliases", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unresolved-first-party-task-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const localSkillDir = path.join(tempRoot, "local-agent") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(localSkillDir, { recursive: true }) + await fs.writeFile(path.join(localSkillDir, "SKILL.md"), "---\nname: missing-agent\ndescription: Local shadow\n---\n") + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Avoids first-party 
task shadowing", + "---", + "", + "- Task compound-engineering:review:missing-agent(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "missing-agent", + sourceDir: localSkillDir, + skillPath: path.join(localSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"))).rejects.toBeDefined() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill docs-skill")) + + warnSpy.mockRestore() + }) + + test("does not collapse unresolved first-party qualified /skill refs to local leaf names during sync", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unresolved-first-party-skill-ref-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const localSkillDir = path.join(tempRoot, "local-plan") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.mkdir(localSkillDir, { recursive: true }) + await fs.writeFile(path.join(localSkillDir, "SKILL.md"), "---\nname: ce-plan\n---\n") + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Avoids first-party skill shadowing", + "---", + "", + "- /skill:compound-engineering:ce:plan", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + { + name: "ce-plan", + sourceDir: localSkillDir, + skillPath: path.join(localSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:compound-engineering-ce-plan") + expect(syncedSkill).not.toContain("/skill:ce-plan") + }) + + test("skips only the 
offending prompt when foreign qualified Task refs are unsupported in synced commands", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-command-foreign-task-")) + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Prompt should reject foreign qualified Task refs", + body: "- Task unknown-plugin:review:some-missing-agent(feature_description)", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + { + name: "safe-review", + description: "Prompt should still sync", + body: "Safe body", + sourcePath: path.join(tempRoot, "commands", "safe-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + expect(await fs.readFile(path.join(tempRoot, "prompts", "safe-review.md"), "utf8")).toContain("Safe body") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync command plan-review")) + + warnSpy.mockRestore() + }) + + test("sync-managed state keeps only aliases for successfully published prompts and skills", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-published-state-aliases-")) + const validSkillDir = path.join(tempRoot, "valid-skill") + const invalidSkillDir = path.join(tempRoot, "invalid-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(validSkillDir, { recursive: true }) + await fs.mkdir(invalidSkillDir, { recursive: true }) + await fs.writeFile(path.join(validSkillDir, "SKILL.md"), "---\nname: valid-skill\n---\n\nBody\n") + await fs.writeFile( + path.join(invalidSkillDir, "SKILL.md"), + [ + "---", + "name: invalid-skill", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "valid-skill", + sourceDir: 
validSkillDir, + skillPath: path.join(validSkillDir, "SKILL.md"), + }, + { + name: "invalid-skill", + sourceDir: invalidSkillDir, + skillPath: path.join(invalidSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "safe-review", + description: "safe", + body: "Safe body", + sourcePath: path.join(tempRoot, "commands", "safe-review.md"), + }, + { + name: "bad-review", + description: "bad", + body: "- Task unknown-plugin:review:bad(feature_description)", + sourcePath: path.join(tempRoot, "commands", "bad-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.nameMaps.skills["valid-skill"]).toBe("valid-skill") + expect(trust.state?.sync.nameMaps.skills["claude-home:valid-skill"]).toBe("valid-skill") + expect(trust.state?.sync.nameMaps.skills["invalid-skill"]).toBeUndefined() + expect(trust.state?.sync.nameMaps.skills["claude-home:invalid-skill"]).toBeUndefined() + expect(trust.state?.sync.nameMaps.prompts["safe-review"]).toBe("safe-review") + expect(trust.state?.sync.nameMaps.prompts["claude-home:safe-review"]).toBe("safe-review") + expect(trust.state?.sync.nameMaps.prompts["bad-review"]).toBeUndefined() + expect(trust.state?.sync.nameMaps.prompts["claude-home:bad-review"]).toBeUndefined() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill invalid-skill")) + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync command bad-review")) + + warnSpy.mockRestore() + }) + + test("sync-managed MCP ownership includes only emitted mcporter server keys", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-published-state-mcp-")) + + await syncToPi({ + skills: [], + mcpServers: { + valid: { url: "https://example.com/mcp" }, + invalid: { env: { TOKEN: "x" } } as any, + }, + }, tempRoot) + + const trust = await 
loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.mcpServers).toEqual(["valid"]) + expect(trust.state?.sync.sharedResources.mcporterConfig).toBe(true) + + const mcporter = JSON.parse(await fs.readFile(path.join(tempRoot, "compound-engineering", "mcporter.json"), "utf8")) as { + mcpServers: Record + } + expect(Object.keys(mcporter.mcpServers)).toEqual(["valid"]) + }) + + test("removes deleted synced prompts when Claude-home commands disappear", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-command-deletion-")) + const promptPath = path.join(tempRoot, "prompts", "plan-review.md") + const managedManifestPath = path.join(tempRoot, "compound-engineering", "compound-engineering-managed.json") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(promptPath, "utf8")).toContain("Review body") + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(promptPath)).rejects.toBeDefined() + + try { + const managedManifest = JSON.parse(await fs.readFile(managedManifestPath, "utf8")) as { syncPrompts?: unknown[] } + expect(managedManifest.syncPrompts ?? 
[]).toHaveLength(0) + } catch { + await expect(fs.access(managedManifestPath)).rejects.toBeDefined() + } + }) + + test("removes renamed synced prompts after a later verified rerun from a legacy prompt filename", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-legacy-prompt-rename-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const legacyPromptPath = path.join(tempRoot, "prompts", "plan_review.md") + await fs.mkdir(path.dirname(legacyPromptPath), { recursive: true }) + await fs.writeFile(legacyPromptPath, "legacy prompt body\n") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan_review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan_review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain("Review body") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan_review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan_review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(legacyPromptPath)).rejects.toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes deleted synced skills on a later verified rerun", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-deletion-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const sourceSkillDir = path.join(tempRoot, "claude-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: Sync deletion test", + "---", + "", + "Body", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: 
sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8")).toContain("docs-skill") + + await syncToPi({ + skills: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "skills", "docs-skill"))).rejects.toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes stale synced MCP servers when Claude-home config deletes them", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-mcp-removal-")) + const mcporterPath = path.join(tempRoot, "compound-engineering", "mcporter.json") + + await syncToPi({ + skills: [], + mcpServers: { + context7: { url: "https://mcp.context7.com/mcp" }, + }, + }, tempRoot) + + let mcporter = JSON.parse(await fs.readFile(mcporterPath, "utf8")) as { mcpServers: Record } + expect(mcporter.mcpServers.context7).toBeDefined() + + await syncToPi({ + skills: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(mcporterPath)).rejects.toBeDefined() + }) + + test("does not remove unrelated MCP servers claimed only by an unverified sync manifest", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-forged-mcp-")) + const mcporterPath = path.join(tempRoot, "compound-engineering", "mcporter.json") + const managedManifestPath = path.join(tempRoot, "compound-engineering", "compound-engineering-managed.json") + + await fs.mkdir(path.dirname(mcporterPath), { recursive: true }) + await fs.writeFile( + mcporterPath, + JSON.stringify({ mcpServers: { unrelated: { baseUrl: "https://example.com/mcp" } } }, null, 2), + ) + await fs.writeFile( + managedManifestPath, + JSON.stringify({ + version: 1, + sync: { + mcpServers: ["unrelated"], + }, + }, null, 2), + ) + + await syncToPi({ + skills: [], + mcpServers: {}, + }, tempRoot) + + const mcporter = JSON.parse(await fs.readFile(mcporterPath, "utf8")) 
as { mcpServers: Record } + expect(mcporter.mcpServers.unrelated).toBeDefined() + }) + + test("removes the live unverified legacy compat extension on empty sync reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-legacy-compat-preserve-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + const legacyCompatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + await fs.mkdir(path.dirname(legacyCompatPath), { recursive: true }) + await fs.writeFile(legacyCompatPath, "legacy compat\n") + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + const layout = resolvePiLayout(tempRoot, "sync") + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("missing") + expect(trust.state).toBeNull() + await expect(fs.access(legacyCompatPath)).rejects.toBeDefined() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("legacy compat extension")) + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(legacyCompatPath)).rejects.toBeDefined() + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("two-pass upgrade from legacy sync artifacts converges and removes only now-provable stale outputs", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-two-pass-upgrade-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const legacyCompatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + const legacyPromptPath = path.join(tempRoot, "prompts", "plan_review.md") + const ambiguousPromptPath = path.join(tempRoot, "prompts", "manual-note.md") + await fs.mkdir(path.dirname(legacyCompatPath), { recursive: true }) + await 
fs.mkdir(path.dirname(legacyPromptPath), { recursive: true }) + await fs.writeFile(legacyCompatPath, "legacy compat\n") + await fs.writeFile(legacyPromptPath, "legacy review\n") + await fs.writeFile(ambiguousPromptPath, "manual note\n") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan_review", + description: "Review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan_review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain("Review body") + expect(await fs.readFile(ambiguousPromptPath, "utf8")).toContain("manual note") + expect(await fs.readFile(legacyCompatPath, "utf8")).toContain('name: "ce_subagent"') + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + await expect(fs.access(legacyCompatPath)).rejects.toBeDefined() + expect(await fs.readFile(ambiguousPromptPath, "utf8")).toContain("manual note") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("unverified sync ownership does not delete shared mcporter config still needed by user state", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-partial-trust-shared-resource-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const layout = resolvePiLayout(tempRoot, "sync") + + const seededState = replacePiManagedSection(null, "sync", createPiManagedSection({ + mcpServers: ["sync-owned"], + sharedResources: { mcporterConfig: true }, + }), "compound-engineering") + + await writePiManagedState(layout, seededState, { install: false, sync: true }) + const manifest = JSON.parse(await fs.readFile(layout.managedManifestPath, "utf8")) as { + sync?: { sharedResources?: { mcporterConfig?: boolean } } + } + manifest.sync = { + ...(manifest.sync ?? 
{}), + sharedResources: { mcporterConfig: false }, + } + await fs.writeFile(layout.managedManifestPath, JSON.stringify(manifest, null, 2) + "\n") + await fs.mkdir(path.dirname(layout.mcporterConfigPath), { recursive: true }) + await fs.writeFile( + layout.mcporterConfigPath, + JSON.stringify({ + mcpServers: { + "install-owned": { command: "install-cmd" }, + "sync-owned": { command: "sync-cmd" }, + unrelated: { command: "user-cmd" }, + }, + }, null, 2) + "\n", + ) + + await syncToPi({ + skills: [], + mcpServers: {}, + }, tempRoot) + + const mcporter = JSON.parse(await fs.readFile(layout.mcporterConfigPath, "utf8")) as { + mcpServers: Record + } + expect(mcporter.mcpServers["sync-owned"]).toBeDefined() + expect(mcporter.mcpServers.unrelated).toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves ambiguous legacy leftovers and warns instead of deleting heuristically", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-legacy-ambiguous-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + const sourceSkillDir = path.join(tempRoot, "claude-skill") + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs_skill", + "description: Legacy migration ambiguity test", + "---", + ].join("\n"), + ) + + const shadowPromptPath = path.join(tempRoot, "prompts", "docs-skill.md") + const shadowSkillPath = path.join(tempRoot, "skills", "docs_skill") + await fs.mkdir(path.dirname(shadowPromptPath), { recursive: true }) + await fs.mkdir(shadowSkillPath, { recursive: true }) + await fs.writeFile(shadowPromptPath, "user-owned shadow prompt\n") + await fs.writeFile(path.join(shadowSkillPath, "SKILL.md"), "user-owned shadow skill\n") + + await syncToPi({ + skills: [ + { + name: "docs_skill", + sourceDir: 
sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + commands: [], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(shadowPromptPath, "utf8")).toContain("user-owned shadow prompt") + expect(await fs.readFile(path.join(shadowSkillPath, "SKILL.md"), "utf8")).toContain("user-owned shadow skill") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("ambiguous legacy Pi sync artifact")) + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes orphaned legacy skill directories discovered from verified prior sync ownership", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-legacy-orphaned-skill-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const layout = resolvePiLayout(tempRoot, "sync") + const orphanedLegacySkillDir = path.join(layout.skillsDir, "docs_skill") + await fs.mkdir(orphanedLegacySkillDir, { recursive: true }) + await fs.writeFile(path.join(orphanedLegacySkillDir, "SKILL.md"), "legacy\n") + + const seededState = replacePiManagedSection(null, "sync", createPiManagedSection({ + artifacts: [createManagedArtifact(layout, "synced-skill", "docs_skill", "docs-skill-2")], + }), "compound-engineering") + await writePiManagedState(layout, seededState, { install: false, sync: true }) + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(orphanedLegacySkillDir)).rejects.toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("warns and still writes a verified snapshot when legacy mcporter config is malformed", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-legacy-bad-mcporter-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + const 
badMcporterPath = path.join(tempRoot, "compound-engineering", "mcporter.json") + await fs.mkdir(path.dirname(badMcporterPath), { recursive: true }) + await fs.writeFile(badMcporterPath, "{ not json\n") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const layout = resolvePiLayout(tempRoot, "sync") + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(await fs.readFile(badMcporterPath, "utf8")).toContain("{ not json") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("legacy mcporter.json")) + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves malformed unverified project mcporter config when sync wants to write MCP servers", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-malformed-unverified-project-mcp-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + const layout = resolvePiLayout(tempRoot, "sync") + + await fs.mkdir(path.dirname(layout.mcporterConfigPath), { recursive: true }) + await fs.writeFile(layout.mcporterConfigPath, "{ not json\n") + await fs.writeFile( + layout.managedManifestPath, + JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + sync: { + sharedResources: { mcporterConfig: true }, + }, + }, null, 2) + "\n", + ) + + await syncToPi({ + skills: [], + commands: [], + mcpServers: { + context7: { url: "https://mcp.context7.com/mcp" }, + }, + }, tempRoot) + + expect(await fs.readFile(layout.mcporterConfigPath, "utf8")).toContain("{ not json") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("leaving it untouched because sync ownership cannot be proven")) + 
+ const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.mcpServers).toEqual([]) + expect(trust.state?.sync.sharedResources.mcporterConfig).toBe(false) + + warnSpy.mockRestore() + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("restores prior sync managed state when stale skill cleanup fails after publication work", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-stale-cleanup-rollback-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const layout = resolvePiLayout(tempRoot, "sync") + const skillsParent = layout.skillsDir + const externalSkillsParent = path.join(tempRoot, "external-skills") + const externalOldSkillDir = path.join(externalSkillsParent, "old-skill") + + await fs.mkdir(externalOldSkillDir, { recursive: true }) + await fs.writeFile(path.join(externalOldSkillDir, "SKILL.md"), "old\n") + await fs.symlink(externalSkillsParent, skillsParent) + + const seededState = replacePiManagedSection(null, "sync", createPiManagedSection({ + artifacts: [createManagedArtifact(layout, "synced-skill", "old-skill", "old-skill")], + }), "compound-engineering") + await writePiManagedState(layout, seededState, { install: false, sync: true }) + + await expect(syncToPi({ + skills: [], + commands: [ + { + name: "new-note", + description: "new", + body: "new body", + sourcePath: path.join(tempRoot, "commands", "new-note.md"), + }, + ], + mcpServers: {}, + }, tempRoot)).rejects.toThrow("symlinked ancestor") + + const restored = await loadPiManagedStateWithTrust(layout) + expect(restored.status).toBe("verified") + expect(restored.state?.sync.artifacts.map((artifact) => artifact.emittedName)).toContain("old-skill") + expect(restored.state?.sync.artifacts.map((artifact) => artifact.emittedName)).not.toContain("new-note") + await expect(fs.access(path.join(layout.promptsDir, 
"new-note.md"))).rejects.toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes stale compat extension when Claude-home Pi sync becomes empty", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-compat-removal-")) + const compatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(compatPath, "utf8")).toContain('name: "ce_subagent"') + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(compatPath)).rejects.toBeDefined() + const agents = await fs.readFile(path.join(tempRoot, "AGENTS.md"), "utf8") + expect(agents).not.toContain("ce_subagent") + expect(agents).toContain("compat tools are not currently installed") + }) + + test("removes the live ambiguous compat extension on empty sync while disabling advertising", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-compat-preserve-untrusted-")) + const compatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(path.dirname(compatPath), { recursive: true }) + await fs.writeFile(compatPath, "legacy compat\n") + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(compatPath)).rejects.toBeDefined() + const agents = await fs.readFile(path.join(tempRoot, "AGENTS.md"), "utf8") + expect(agents).not.toContain("ce_subagent") + expect(agents).toContain("compat tools are not currently installed") + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("ambiguous legacy compat extension")) + + warnSpy.mockRestore() + }) + + 
test("compat runtime only uses bundled mcporter fallback when bundled manifest authorizes it", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-bundled-mcporter-fallback-")) + const bundledDir = path.join(tempRoot, "pi-resources", "compound-engineering") + await fs.mkdir(bundledDir, { recursive: true }) + await fs.writeFile(path.join(bundledDir, "mcporter.json"), JSON.stringify({ mcpServers: { bundled: {} } }, null, 2)) + + await fs.writeFile(path.join(bundledDir, "compound-engineering-managed.json"), JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + policyFingerprint: getPiPolicyFingerprint(), + install: { + sharedResources: { mcporterConfig: true }, + }, + }, null, 2)) + + let helpers = await loadCompatHelpers(tempRoot) + expect(helpers.resolveMcporterConfigPath(tempRoot)).toBe(path.join(bundledDir, "mcporter.json")) + + await fs.writeFile(path.join(bundledDir, "compound-engineering-managed.json"), JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + policyFingerprint: "wrong-policy", + install: { + sharedResources: { mcporterConfig: true }, + }, + }, null, 2)) + + helpers = await loadCompatHelpers(tempRoot) + expect(helpers.resolveMcporterConfigPath(tempRoot)).toBeUndefined() + }) + + test("compat runtime does not trust bundled alias manifests by location alone", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-bundled-alias-untrusted-")) + const bundledDir = path.join(tempRoot, "pi-resources", "compound-engineering") + await fs.mkdir(bundledDir, { recursive: true }) + await fs.writeFile(path.join(bundledDir, "compound-engineering-managed.json"), JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + policyFingerprint: getPiPolicyFingerprint(), + install: { + nameMaps: { + skills: { + "compound-engineering:ce-plan": "bundled-ce-plan", + }, + }, + }, + }, null, 2)) + + const { resolveAgentName } = await loadCompatHelpers(tempRoot) + expect(() 
=> resolveAgentName(tempRoot, "compound-engineering:ce-plan")).toThrow("Unknown qualified subagent target") + }) + + test("ce_list_capabilities reports bundled MCP availability when bundled fallback is authorized", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-capability-bundled-mcp-")) + const bundledDir = path.join(tempRoot, "pi-resources", "compound-engineering") + await fs.mkdir(bundledDir, { recursive: true }) + await fs.writeFile(path.join(bundledDir, "mcporter.json"), JSON.stringify({ mcpServers: { bundled: {} } }, null, 2)) + await fs.writeFile(path.join(bundledDir, "compound-engineering-managed.json"), JSON.stringify({ + version: 1, + pluginName: "compound-engineering", + policyFingerprint: getPiPolicyFingerprint(), + install: { + sharedResources: { mcporterConfig: true }, + }, + }, null, 2)) + + const mod = await loadCompatHelpers(tempRoot) + const tools = new Map<string, { name: string; execute: (...args: any[]) => any }>() + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const capabilities = tools.get("ce_list_capabilities") + expect(capabilities).toBeDefined() + const result = await capabilities!.execute("tool-call-id", {}, undefined, undefined, { cwd: tempRoot }) + const details = result.details as { + shared: { + mcporter: { + available: boolean + source: string | null + servers: string[] + provenance?: { status: string; authority: string | null } + } + } + } + expect(details.shared.mcporter).toMatchObject({ available: true, source: "bundled", servers: ["bundled"] }) + expect(details.shared.mcporter.provenance).toEqual({ status: "available", authority: "bundled" }) + }) + + test("ce_list_capabilities reports blocked project-sync provenance when unverified local MCP blocks fallback", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-capability-blocked-local-mcp-")) + const stateHome = path.join(tempRoot, "state-home") + const fakeHome = 
path.join(tempRoot, "home") + const projectRoot = path.join(tempRoot, "project") + const nestedCwd = path.join(projectRoot, "apps", "docs") + const originalHome = process.env.HOME + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = fakeHome + + await fs.mkdir(nestedCwd, { recursive: true }) + await fs.mkdir(path.join(projectRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile( + path.join(projectRoot, "compound-engineering", "compound-engineering-managed.json"), + JSON.stringify({ + version: 1, + sync: { + sharedResources: { mcporterConfig: true }, + }, + }, null, 2), + ) + await fs.writeFile(path.join(projectRoot, "compound-engineering", "mcporter.json"), JSON.stringify({ mcpServers: { local: {} } }, null, 2)) + + const globalRoot = path.join(fakeHome, ".pi", "agent") + const globalLayout = resolvePiLayout(globalRoot, "sync") + await fs.mkdir(path.dirname(globalLayout.mcporterConfigPath), { recursive: true }) + await fs.writeFile(globalLayout.mcporterConfigPath, JSON.stringify({ mcpServers: { global: {} } }, null, 2)) + await writePiManagedState( + globalLayout, + replacePiManagedSection(null, "sync", createPiManagedSection({ + nameMaps: {}, + sharedResources: { mcporterConfig: true }, + }), "compound-engineering"), + { install: false, sync: true }, + ) + + const mod = await loadCompatHelpers(projectRoot) + const tools = new Map<string, { name: string; execute: (...args: any[]) => any }>() + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const capabilities = tools.get("ce_list_capabilities") + const result = await capabilities!.execute("tool-call-id", {}, undefined, undefined, { cwd: nestedCwd }) + const details = result.details as { + shared: { + mcporter: { + available: boolean + source: string | null + servers: string[] + provenance?: { status: string; authority: string | null } + } + } + } + + expect(details.shared.mcporter).toMatchObject({ available: false, source: null, 
servers: [] }) + expect(details.shared.mcporter.provenance).toEqual({ status: "blocked-unverified-project-sync", authority: null }) + + delete process.env.COMPOUND_ENGINEERING_HOME + if (originalHome === undefined) { + delete process.env.HOME + } else { + process.env.HOME = originalHome + } + }) + + test("disabled local AGENTS state does not hide globally available runtime MCP discovery", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-global-capability-discovery-")) + const stateHome = path.join(tempRoot, "state-home") + const globalRoot = path.join(stateHome, ".pi", "agent") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = stateHome + + const globalLayout = resolvePiLayout(globalRoot, "install") + const globalState = replacePiManagedSection(null, "install", createPiManagedSection({ + nameMaps: { + agents: { + "compound-engineering:ce:plan": "ce-plan-global", + }, + }, + sharedResources: { mcporterConfig: true }, + }), "compound-engineering") + await writePiManagedState(globalLayout, globalState, { install: true, sync: false }) + await fs.mkdir(path.dirname(globalLayout.mcporterConfigPath), { recursive: true }) + await fs.writeFile(globalLayout.mcporterConfigPath, JSON.stringify({ mcpServers: { global: {} } }, null, 2)) + + await syncToPi({ skills: [], commands: [], mcpServers: {} }, tempRoot) + + const agents = await fs.readFile(path.join(tempRoot, "AGENTS.md"), "utf8") + expect(agents).toContain("Verified global or bundled Compound Engineering fallbacks may still exist") + + const mod = await loadCompatHelpers(tempRoot) + const tools = new Map<string, { name: string; execute: (...args: any[]) => any }>() + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const capabilities = tools.get("ce_list_capabilities") + const result = await capabilities!.execute("tool-call-id", {}, undefined, undefined, { cwd: tempRoot }) + const details = result.details as { + shared: 
{ mcporter: { available: boolean; source: string | null; servers: string[] } } + } + expect(details.shared.mcporter).toMatchObject({ available: true, source: "global", servers: ["global"] }) + + delete process.env.COMPOUND_ENGINEERING_HOME + delete process.env.HOME + }) + + test("untrusted local mcporter config blocks lower-priority global fallback", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-untrusted-local-mcp-blocks-global-")) + const stateHome = path.join(tempRoot, "state-home") + const globalRoot = path.join(stateHome, ".pi", "agent") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + process.env.HOME = stateHome + + const globalLayout = resolvePiLayout(globalRoot, "install") + const globalState = replacePiManagedSection(null, "install", createPiManagedSection({ + sharedResources: { mcporterConfig: true }, + }), "compound-engineering") + await writePiManagedState(globalLayout, globalState, { install: true, sync: false }) + await fs.mkdir(path.dirname(globalLayout.mcporterConfigPath), { recursive: true }) + await fs.writeFile(globalLayout.mcporterConfigPath, JSON.stringify({ mcpServers: { global: {} } }, null, 2)) + + await fs.mkdir(path.join(tempRoot, "compound-engineering"), { recursive: true }) + await fs.writeFile(path.join(tempRoot, "compound-engineering", "mcporter.json"), JSON.stringify({ mcpServers: { local: {} } }, null, 2)) + + const mod = await loadCompatHelpers(tempRoot) + expect(mod.resolveMcporterConfigPath(tempRoot)).toBeUndefined() + + const tools = new Map<string, { name: string; execute: (...args: any[]) => any }>() + mod.default({ + registerTool(tool) { + tools.set(tool.name, tool) + }, + async exec() { + return { code: 0, stdout: "ok", stderr: "" } + }, + }) + + const capabilities = tools.get("ce_list_capabilities") + const result = await capabilities!.execute("tool-call-id", {}, undefined, undefined, { cwd: tempRoot }) + const details = result.details as { + shared: { mcporter: { available: boolean; source: string | null; servers: string[] } } + } + 
expect(details.shared.mcporter).toMatchObject({ available: false, source: null, servers: [] }) + + delete process.env.COMPOUND_ENGINEERING_HOME + delete process.env.HOME + }) + + test("does not derive legacy skill-directory cleanup candidates from prompt artifacts", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-prompt-cleanup-scope-")) + const layout = resolvePiLayout(tempRoot, "sync") + const unrelatedSkillDir = path.join(layout.skillsDir, "plan-review") + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await fs.mkdir(unrelatedSkillDir, { recursive: true }) + await fs.writeFile(path.join(unrelatedSkillDir, "SKILL.md"), "---\nname: unrelated\n---\n\nBody\n") + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(unrelatedSkillDir, "SKILL.md"), "utf8")).toContain("name: unrelated") + }) + + test("keeps compat extension when verified install-owned state still exists", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-compat-shared-root-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const installLayout = resolvePiLayout(tempRoot, "sync") + await fs.mkdir(installLayout.extensionsDir, { recursive: true }) + await fs.writeFile(path.join(installLayout.extensionsDir, "compound-engineering-compat.ts"), "install compat\n") + await writePiManagedState( + installLayout, + replacePiManagedSection(null, "install", createPiManagedSection({ + artifacts: [], + sharedResources: { + compatExtension: true, + }, + }), "compound-engineering"), + { install: true, sync: false }, + ) + + await syncToPi({ + skills: [], + commands: [], + mcpServers: {}, + }, tempRoot) + + expect(await 
fs.readFile(path.join(installLayout.extensionsDir, "compound-engineering-compat.ts"), "utf8")).toContain('name: "ce_subagent"') + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not rewrite unchanged sync managed state or compat extension on no-op reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-noop-rerun-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const config: ClaudeHomeConfig = { + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const layout = resolvePiLayout(tempRoot, "sync") + const compatPath = path.join(layout.extensionsDir, "compound-engineering-compat.ts") + const promptPath = path.join(layout.promptsDir, "plan-review.md") + const firstManifest = await fs.stat(layout.managedManifestPath) + const firstVerification = await fs.stat(layout.verificationPath) + const firstCompat = await fs.readFile(compatPath, "utf8") + const firstPrompt = await fs.stat(promptPath) + + await new Promise((resolve) => setTimeout(resolve, 15)) + await syncToPi(config, tempRoot) + + const secondManifest = await fs.stat(layout.managedManifestPath) + const secondVerification = await fs.stat(layout.verificationPath) + const secondCompat = await fs.readFile(compatPath, "utf8") + const secondPrompt = await fs.stat(promptPath) + expect(secondManifest.mtimeMs).toBe(firstManifest.mtimeMs) + expect(secondVerification.mtimeMs).toBe(firstVerification.mtimeMs) + expect(secondCompat).toBe(firstCompat) + expect(secondPrompt.mtimeMs).toBe(firstPrompt.mtimeMs) + expect((await fs.readdir(layout.root)).some((entry) => entry.startsWith(".pi-sync-rollback-"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not snapshot unchanged shared sync 
files on no-op reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-noop-shared-snapshots-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const config: ClaudeHomeConfig = { + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + const layout = resolvePiLayout(tempRoot, "sync") + const compatPath = path.join(layout.extensionsDir, "compound-engineering-compat.ts") + const snapshottedPaths: string[] = [] + setManagedPathSnapshotHookForTests((targetPath) => { + snapshottedPaths.push(targetPath) + }) + + await syncToPi(config, tempRoot) + + expect(snapshottedPaths).not.toContain(layout.agentsPath) + expect(snapshottedPaths).not.toContain(layout.managedManifestPath) + expect(snapshottedPaths).not.toContain(layout.verificationPath) + expect(snapshottedPaths).not.toContain(compatPath) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not create rollback temp dirs on no-op sync reruns for unchanged skills", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-noop-skill-rerun-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const 
layout = resolvePiLayout(tempRoot, "sync") + await new Promise((resolve) => setTimeout(resolve, 15)) + await syncToPi(config, tempRoot) + + expect((await fs.readdir(layout.root)).some((entry) => entry.startsWith(".pi-sync-rollback-"))).toBe(false) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not snapshot unchanged synced skill directories on no-op reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-noop-skill-snapshot-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const targetSkillDir = path.join(resolvePiLayout(tempRoot, "sync").skillsDir, "docs-skill") + const snapshottedPaths: string[] = [] + setManagedPathSnapshotHookForTests((targetPath) => { + snapshottedPaths.push(targetPath) + }) + + await syncToPi(config, tempRoot) + + expect(snapshottedPaths).not.toContain(targetSkillDir) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("avoids whole-directory publication snapshots for incremental synced skill updates", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-incremental-skill-snapshot-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await 
fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const targetSkillDir = path.join(resolvePiLayout(tempRoot, "sync").skillsDir, "docs-skill") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "updated\n") + + const snapshottedPaths: string[] = [] + setManagedPathSnapshotHookForTests((targetPath) => { + snapshottedPaths.push(targetPath) + }) + + await syncToPi(config, tempRoot) + + expect(snapshottedPaths).not.toContain(targetSkillDir) + expect(await fs.readFile(path.join(targetSkillDir, "nested", "stable.txt"), "utf8")).toBe("updated\n") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("does not perform full deep compare for unchanged synced skill directories on stable reruns", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-fast-path-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + await syncToPi(config, tempRoot) + + let fullCompareCalls = 0 + setPiSkillFullCompareHookForTests(() => { + fullCompareCalls += 1 + }) + let 
sourceFingerprintCalls = 0 + setPiSkillSourceFingerprintHookForTests(() => { + sourceFingerprintCalls += 1 + }) + + await syncToPi(config, tempRoot) + + expect(fullCompareCalls).toBe(0) + expect(sourceFingerprintCalls).toBe(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("falls back to full deep compare when a synced skill tree changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-fast-path-fallback-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + await syncToPi(config, tempRoot) + + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "changed\n") + + let fullCompareCalls = 0 + setPiSkillFullCompareHookForTests(() => { + fullCompareCalls += 1 + }) + let sourceFingerprintCalls = 0 + setPiSkillSourceFingerprintHookForTests(() => { + sourceFingerprintCalls += 1 + }) + + await syncToPi(config, tempRoot) + + expect(fullCompareCalls).toBeGreaterThan(0) + expect(sourceFingerprintCalls).toBeGreaterThan(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("invalidates synced skill fast paths when the Pi policy fingerprint changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-policy-fast-path-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + 
process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + setPiPolicyFingerprintForTests("policy-v1") + await syncToPi(config, tempRoot) + await syncToPi(config, tempRoot) + + let fullCompareCalls = 0 + let sourceFingerprintCalls = 0 + setPiSkillFullCompareHookForTests(() => { + fullCompareCalls += 1 + }) + setPiSkillSourceFingerprintHookForTests(() => { + sourceFingerprintCalls += 1 + }) + + setPiPolicyFingerprintForTests("policy-v2") + await syncToPi(config, tempRoot) + + expect(fullCompareCalls).toBeGreaterThan(0) + expect(sourceFingerprintCalls).toBeGreaterThan(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("invalidates synced skill fast paths when install name maps change rendered output", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-render-fast-path-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + "---\nname: docs-skill\n---\n\n- /skill:compound-engineering:ce-plan\n- Task compound-engineering:ce-plan(feature_description)\n", + ) + + await seedVerifiedProjectInstallNameMaps(tempRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-v1", + }, + }) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + 
mcpServers: {}, + } + + await syncToPi(config, tempRoot) + await syncToPi(config, tempRoot) + + let fullCompareCalls = 0 + setPiSkillFullCompareHookForTests(() => { + fullCompareCalls += 1 + }) + + await seedVerifiedProjectInstallNameMaps(tempRoot, { + skills: { + "compound-engineering:ce-plan": "ce-plan-v2", + }, + }) + + await syncToPi(config, tempRoot) + + const syncedSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + expect(syncedSkill).toContain("/skill:ce-plan-v2") + expect(syncedSkill).toContain('Run ce_subagent with agent="ce-plan-v2" and task="feature_description".') + expect(fullCompareCalls).toBeGreaterThan(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("reuses one deep source analysis pass when a synced skill rerun misses the fast path", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-source-analysis-collapse-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + setPiPolicyFingerprintForTests("policy-v1") + await syncToPi(config, tempRoot) + await syncToPi(config, tempRoot) + + let sourceAnalysisCalls = 0 + setPiSkillSourceAnalysisHookForTests(() => { + sourceAnalysisCalls += 1 + }) + + setPiPolicyFingerprintForTests("policy-v2") + await syncToPi(config, tempRoot) + + expect(sourceAnalysisCalls).toBe(1) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("falls back to one planning pass 
when the synced skill fast-path record is malformed", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-malformed-fast-path-record-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + await syncToPi(config, tempRoot) + + const targetDir = path.join(tempRoot, "skills", "docs-skill") + const recordPath = path.join(stateHome, ".compound-engineering", "pi-skill-fingerprints", createHash("sha256").update(path.resolve(targetDir)).digest("hex") + ".json") + await fs.writeFile(recordPath, "{ invalid json\n") + + let fullCompareCalls = 0 + let sourceAnalysisCalls = 0 + setPiSkillFullCompareHookForTests(() => { + fullCompareCalls += 1 + }) + setPiSkillSourceAnalysisHookForTests(() => { + sourceAnalysisCalls += 1 + }) + + await syncToPi(config, tempRoot) + + expect(fullCompareCalls).toBe(1) + expect(sourceAnalysisCalls).toBe(1) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("repairs drifted target content even when the source skill tree is unchanged", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-target-drift-repair-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), 
"---\nname: docs-skill\n---\n\nBody\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + const targetSkillPath = path.join(tempRoot, "skills", "docs-skill", "SKILL.md") + await fs.writeFile(targetSkillPath, "drifted\n") + + let fullCompareCalls = 0 + setPiSkillFullCompareHookForTests(() => { + fullCompareCalls += 1 + }) + + await syncToPi(config, tempRoot) + + expect(await fs.readFile(targetSkillPath, "utf8")).toContain("name: docs-skill") + expect(fullCompareCalls).toBeGreaterThan(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("replace-path synced skill writes do not immediately rebuild fast-path analysis state", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-replace-no-postcopy-analysis-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + + let sourceAnalysisCalls = 0 + setPiSkillSourceAnalysisHookForTests(() => { + sourceAnalysisCalls += 1 + }) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(sourceAnalysisCalls).toBe(0) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("rejects symlinked compat extension targets during sync writes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-compat-symlink-")) + const externalCompat = path.join(tempRoot, "external-compat.ts") + const compatPath = path.join(tempRoot, "extensions", "compound-engineering-compat.ts") + + await 
fs.mkdir(path.dirname(compatPath), { recursive: true }) + await fs.writeFile(externalCompat, "external compat\n") + await fs.symlink(externalCompat, compatPath) + + await expect(syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "Personal review", + body: "Review body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot)).rejects.toThrow("Refusing to write through symlink target") + + expect(await fs.readFile(externalCompat, "utf8")).toBe("external compat\n") + }) + + test("restores prior mcporter config when publication fails after merge begins", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-mcporter-rollback-")) + const configPath = path.join(tempRoot, "compound-engineering", "mcporter.json") + + await fs.mkdir(path.dirname(configPath), { recursive: true }) + await fs.writeFile(configPath, JSON.stringify({ mcpServers: { existing: { command: "keep" } } }, null, 2) + "\n") + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === configPath && stage === "beforeRename") { + throw new Error("simulated mcporter failure") + } + }) + + await expect(syncToPi({ + skills: [], + mcpServers: { + context7: { url: "https://mcp.context7.com/mcp" }, + }, + }, tempRoot)).rejects.toThrow("simulated mcporter failure") + + const restored = JSON.parse(await fs.readFile(configPath, "utf8")) as { mcpServers: Record<string, { command?: string }> } + expect(restored.mcpServers.existing?.command).toBe("keep") + expect(restored.mcpServers.context7).toBeUndefined() + }) + + test("keeps the prior verified sync state when mcporter publication fails", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-mcporter-trust-boundary-")) + const stateHome = path.join(tempRoot, "state-home") + const configPath = path.join(tempRoot, "compound-engineering", "mcporter.json") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await syncToPi({ + skills: [], + 
commands: [ + { + name: "old-review", + description: "Old review", + body: "Old review body", + sourcePath: path.join(tempRoot, "commands", "old-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === configPath && stage === "beforeRename") { + throw new Error("simulated mcporter failure") + } + }) + + await expect(syncToPi({ + skills: [], + commands: [ + { + name: "new-review", + description: "New review", + body: "New review body", + sourcePath: path.join(tempRoot, "commands", "new-review.md"), + }, + ], + mcpServers: { + context7: { url: "https://mcp.context7.com/mcp" }, + }, + }, tempRoot)).rejects.toThrow("simulated mcporter failure") + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.artifacts.map((artifact) => artifact.emittedName)).toEqual(["old-review"]) + expect(trust.state?.sync.mcpServers).toEqual([]) + expect(await fs.readFile(path.join(tempRoot, "prompts", "old-review.md"), "utf8")).toContain("Old review body") + await expect(fs.access(path.join(tempRoot, "prompts", "new-review.md"))).rejects.toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("removes newly written sync prompts when managed state commit fails", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-manifest-rollback-prompt-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await syncToPi({ + skills: [], + commands: [ + { + name: "old-review", + description: "Old review", + body: "Old review body", + sourcePath: path.join(tempRoot, "commands", "old-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const layout = resolvePiLayout(tempRoot, "sync") + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === layout.managedManifestPath && stage === "beforeRename") { + 
throw new Error("simulated sync manifest failure") + } + }) + + await expect(syncToPi({ + skills: [], + commands: [ + { + name: "new-review", + description: "New review", + body: "New review body", + sourcePath: path.join(tempRoot, "commands", "new-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot)).rejects.toThrow("simulated sync manifest failure") + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.artifacts.map((artifact) => artifact.emittedName)).toEqual(["old-review"]) + expect(await fs.readFile(path.join(tempRoot, "prompts", "old-review.md"), "utf8")).toContain("Old review body") + await expect(fs.access(path.join(tempRoot, "prompts", "new-review.md"))).rejects.toBeDefined() + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("skips dangling symlinked file assets during Pi sync materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-dangling-symlink-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + const missingAssetPath = path.join(tempRoot, "missing.txt") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.symlink(missingAssetPath, path.join(sourceSkillDir, "asset.txt")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const copiedSkill = await fs.readFile(path.join(tempRoot, "skills", "ce-plan", "SKILL.md"), "utf8") + expect(copiedSkill).toContain('Run ce_subagent with agent="repo-research-analyst" and task="feature_description".') + await 
expect(fs.access(path.join(tempRoot, "skills", "ce-plan", "asset.txt"))).rejects.toBeDefined() + + const skillsDir = path.join(tempRoot, "skills") + const before = await fs.readdir(skillsDir) + await syncToPi(config, tempRoot) + const after = await fs.readdir(skillsDir) + + expect(before.filter((entry) => entry.startsWith("ce-plan.bak."))).toHaveLength(0) + expect(after.filter((entry) => entry.startsWith("ce-plan.bak."))).toHaveLength(0) + }) + + test("rejects cyclic directory symlinks during Pi sync materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-cycle-")) + const sourceSkillDir = path.join(tempRoot, "claude-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.symlink(sourceSkillDir, path.join(sourceSkillDir, "loop")) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: ce-plan", + "description: Plan workflow", + "---", + "", + "# Plan", + "", + "- Task compound-engineering:research:repo-research-analyst(feature_description)", + ].join("\n"), + ) + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "ce-plan", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await expect(syncToPi(config, tempRoot)).rejects.toThrow("cyclic directory symlink") + }) }) From fcc30d985db0f42abacfce8e03dc4bc5b45c40d5 Mon Sep 17 00:00:00 2001 From: Dragos Musetescu Date: Thu, 2 Apr 2026 16:31:02 +0000 Subject: [PATCH 2/2] fix(pi): harden skill sync and install contracts --- ...2-005-fix-pi-pr288-review-followup-plan.md | 212 +++++++ ...fix-pi-pr288-deep-review-followups-plan.md | 375 ++++++++++++ src/sync/pi.ts | 85 ++- src/targets/pi.ts | 33 +- src/utils/files.ts | 29 +- src/utils/pi-skills.ts | 213 ++++++- tests/files.test.ts | 54 ++ tests/pi-writer.test.ts | 162 ++++- tests/sync-pi.test.ts | 560 +++++++++++++++++- 9 files changed, 1634 insertions(+), 89 deletions(-) create mode 100644 
docs/plans/2026-04-02-005-fix-pi-pr288-review-followup-plan.md create mode 100644 docs/plans/2026-04-02-006-fix-pi-pr288-deep-review-followups-plan.md diff --git a/docs/plans/2026-04-02-005-fix-pi-pr288-review-followup-plan.md b/docs/plans/2026-04-02-005-fix-pi-pr288-review-followup-plan.md new file mode 100644 index 000000000..8d1976cc4 --- /dev/null +++ b/docs/plans/2026-04-02-005-fix-pi-pr288-review-followup-plan.md @@ -0,0 +1,212 @@ +--- +title: "fix: Address PR #288 Pi review follow-up findings" +type: fix +status: completed +date: 2026-04-02 +pr: https://github.com/EveryInc/compound-engineering-plugin/pull/288 +sources: + - https://github.com/EveryInc/compound-engineering-plugin/pull/288#pullrequestreview-4051612811 + - docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md + - docs/plans/2026-04-01-003-fix-pi-capability-convergence-followups-plan.md + - docs/plans/2026-04-01-002-fix-pi-transactional-parity-followups-plan.md + - src/sync/pi.ts + - src/utils/pi-skills.ts + - tests/sync-pi.test.ts + - tests/pi-writer.test.ts +--- + +# fix: Address PR #288 Pi review follow-up findings + +## Overview + +Address the two validated Codex review findings on PR #288: one correctness bug in Pi sync convergence and one filesystem regression in Pi skill materialization. The first fix must ensure same-run prompt and skill dependencies stabilize together before managed state is derived. The second fix must preserve executable file modes when Pi materialization copies bundled files. + +## Problem Frame + +The current Pi sync loop can incorrectly demote or publish artifacts because same-run stabilization is evaluated separately for prompt results and skill results even when they depend on each other. That breaks the intended published-only contract for managed artifacts, alias maps, and rerun narrowing. 
Separately, Pi materialization currently rewrites copied files with the atomic writer's default mode, which can strip executable bits from bundled scripts used by installed or synced skills. + +## Requirements Trace + +- R1. Pi sync must evaluate same-run prompt and skill publication status as a combined dependency graph so valid cross-type dependencies are not incorrectly demoted. +- R2. Managed Pi sync state must continue to reflect only artifacts that are truly published after stabilization, including rerun narrowing and stale cleanup behavior. +- R3. Pi materialization must preserve source file modes for copied files so executable bundled scripts remain executable after sync/install. +- R4. The fixes must follow existing Pi sync/materialization patterns and extend the current regression suite rather than introducing new persistence models or side-channel state. + +## Scope Boundaries + +- No changes to persisted alias semantics beyond what is required for correct same-run stabilization. +- No redesign of Pi sync status types, retry policy categories, or broader trust-boundary handling. +- No broader file metadata work beyond preserving source mode for copied materialized files. +- No implementation of unrelated review items from older PR #288 feedback plans. + +## Context & Research + +### Relevant Code and Patterns + +- `src/sync/pi.ts` owns the sync pass loop, retry narrowing, published artifact aggregation, warning accumulation, and `stabilizeSameRunQualifiedDependencies()`. +- `src/sync/commands.ts` and `src/sync/pi-skills.ts` already attach `sameRunDependencies` to published prompt/skill results. +- `src/utils/pi-skills.ts` is the shared Pi materialization seam used by both sync and writer flows via `copySkillDirForPi()` and `copyDirForPiMaterialization()`. +- `src/utils/files.ts` already treats file modes as part of the atomic-write/snapshot contract via explicit `mode` handling and snapshot restore logic. 
+- `tests/sync-pi.test.ts` already contains contract-style coverage for same-run qualified refs, retry narrowing, published-only name maps, and sync cleanup behavior. +- `tests/pi-writer.test.ts` already covers shared Pi materialization and writer-side artifact lifecycle behavior. +- `tests/files.test.ts` is the local precedent for asserting permission preservation as part of file-helper correctness. + +### Institutional Learnings + +- `docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md`: same-run resolvability should stay separate from persisted alias maps; final state should be derived only from artifacts that actually publish. +- `docs/plans/2026-04-01-003-fix-pi-capability-convergence-followups-plan.md`: retry and publication state should converge deterministically across prompts and skills together. +- `docs/plans/2026-03-20-003-fix-pi-symlink-boundary-and-materialization-safety-plan.md`: Pi materialization should remain a shared helper contract rather than diverging by caller. +- `docs/plans/2026-04-01-002-fix-pi-transactional-parity-followups-plan.md`: filesystem permissions are part of correctness, not incidental metadata. +- `docs/solutions/codex-skill-prompt-entrypoints.md`: preserve canonical internal identities and generate target-safe entrypoints/output aliases as a separate layer. + +### External References + +- None. Local patterns and existing Pi-specific plans were sufficient for this follow-up. + +## Key Technical Decisions + +- Keep same-run stabilization in the sync convergence layer rather than widening persisted `nameMaps`. + Rationale: the bug is about how current-run prompt/skill results are interpreted together, not about missing long-lived aliases. Persisted aliases should continue to represent only final published artifacts. +- Treat prompt and skill results as one publication set when evaluating same-run dependencies. 
+ Rationale: prompt-to-skill and skill-to-prompt references are cross-type by definition, so per-type stabilization cannot determine whether a dependency is published in the same pass. +- Preserve source mode at the shared Pi materialization seam instead of patching sync and writer callers independently. + Rationale: both sync and writer flows already rely on `copySkillDirForPi()` and should continue to share one contract for copied file behavior. +- Extend existing regression suites in `tests/sync-pi.test.ts` and `tests/pi-writer.test.ts` rather than adding a new test harness. + Rationale: the failure modes are already represented in those suites' lifecycle and convergence assertions. + +## Open Questions + +### Resolved During Planning + +- Should this be a new plan or an update to older PR #288 follow-up plans? + Resolution: new plan. These are newly validated review findings against the current PR state and should be tracked separately from older feedback rounds. +- Should the fix persist more same-run aliases to make cross-type stabilization succeed? + Resolution: no. The plan keeps same-run stabilization logic separate from persisted alias maps and continues to derive final managed state only from published artifacts. +- Should file-mode preservation be fixed separately for sync and writer codepaths? + Resolution: no. The shared materialization helper should own copied-file mode preservation so both codepaths stay in parity. + +### Deferred to Implementation + +- Whether the cleanest implementation is a combined stabilization helper that accepts both result sets or a precomputed combined published-dependency view. + Why deferred: this is a code-shape choice that depends on the current implementation details once editing starts, but it should not change the planned behavior. +- Whether mode-only changes require a helper-level no-op detection adjustment in addition to passing explicit modes into writes. 
+ Why deferred: the desired behavior is clear, but the narrowest code change depends on how the existing materialization fast paths behave under tests. + +## Implementation Units + +- [x] **Unit 1: Fix cross-type same-run stabilization in Pi sync** + +**Goal:** Ensure prompt and skill results stabilize against a combined same-run publication view so cross-type dependencies converge correctly before published artifacts, alias maps, rerun narrowing, and cleanup are derived. + +**Requirements:** R1, R2, R4 + +**Dependencies:** None + +**Files:** +- Modify: `src/sync/pi.ts` +- Verify/adjust if needed: `src/sync/commands.ts` +- Verify/adjust if needed: `src/sync/pi-skills.ts` +- Verify/adjust if needed: `src/utils/pi-skills.ts` +- Test: `tests/sync-pi.test.ts` + +**Approach:** +- Change stabilization planning so same-run dependency checks can see published prompts and published skills together for the current pass rather than inferring both sets from a single per-type `results` array. +- Preserve the existing status model (`published`, `retryable`, `blocked-by-policy`, `unsupported-final`) and continue deriving final managed state from the post-stabilization published subset only. +- Ensure rerun narrowing still retries only the artifacts that remain retryable after combined stabilization, not artifacts that were already proven published or permanently unsupported. +- Keep current-run dependency handling separate from persisted alias state so no dead aliases or transient publish outcomes leak into managed `nameMaps`. + +**Execution note:** Start with characterization coverage for the current broken cross-type behavior before changing the convergence logic. 
+ +**Patterns to follow:** +- `src/sync/pi.ts` published-only derivation via `filterPublishedPromptMap()` and `filterPublishedSkillMap()` +- `tests/sync-pi.test.ts` existing same-run qualified sibling and retry narrowing coverage +- `docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md` for the distinction between same-run resolvability and persisted aliases + +**Test scenarios:** +- Happy path: prompt referencing a same-run skill remains published when the depended-on skill also publishes in the same pass, and both appear in final sync artifacts/name maps. +- Happy path: skill referencing a same-run prompt remains published when the depended-on prompt also publishes in the same pass. +- Error path: prompt depending on a same-run skill that becomes retryable or blocked is also demoted to `retryable` for that pass and omitted from final managed state. +- Error path: skill depending on a same-run prompt that becomes retryable or blocked is also demoted to `retryable` for that pass and omitted from final managed state. +- Integration: mixed dependency chain `prompt A -> skill B -> prompt C` where `prompt C` fails first pass causes `A` and `B` to demote together, then retries only the narrowed mixed set on the next pass. +- Integration: an artifact written earlier in the pass but later demoted during stabilization is removed from final on-disk outputs and not retained in cleanup inputs or `currentRunArtifacts`. +- Integration: narrow rerun mode produces the same final published prompt/skill state as full rerun mode for a mixed prompt+skill dependency case. + +**Verification:** +- Cross-type same-run cases in `tests/sync-pi.test.ts` fail before the fix and pass afterward. +- Final sync state includes only artifacts that remain published after combined stabilization. +- Retry narrowing still converges deterministically without broadening the rerun set back to already-published artifacts. 
+ +- [x] **Unit 2: Preserve executable file modes during Pi materialization** + +**Goal:** Preserve source file modes for copied Pi materialization outputs so executable bundled scripts remain executable across sync, writer, incremental updates, and rollback-sensitive paths. + +**Requirements:** R3, R4 + +**Dependencies:** None + +**Files:** +- Modify: `src/utils/pi-skills.ts` +- Modify if needed: `src/utils/files.ts` +- Test: `tests/sync-pi.test.ts` +- Test: `tests/pi-writer.test.ts` +- Test: `tests/files.test.ts` + +**Approach:** +- Update the shared copied-file materialization path so source mode is captured and reapplied whenever a file is copied into a materialized Pi skill tree. +- Preserve parity between initial replacement materialization and incremental updates so executable files do not lose their mode on first publish or later syncs. +- Confirm the chosen change still works with the repo’s atomic-write and rollback expectations instead of introducing a Pi-only permission path. +- Limit the scope to copied files; do not broaden the change into a general metadata sync system. +- This unit can land independently from Unit 1 even though Unit 1 remains the recommended first change because it affects the broader sync convergence contract. + +**Execution note:** Add regression coverage first for executable copied assets in both sync and writer paths, then tighten the helper behavior only as much as those tests require. + +**Patterns to follow:** +- `src/utils/files.ts` existing explicit `mode` support and snapshot restore logic +- `tests/files.test.ts` permission preservation assertions using `stat.mode & 0o777` +- `tests/pi-writer.test.ts` shared materialization lifecycle coverage + +**Test scenarios:** +- Happy path: first Pi sync materialization preserves `0755` on an executable copied script under a skill directory. +- Happy path: writer/install materialization preserves `0755` on the same kind of copied executable asset. 
+- Edge case: incremental resync after changing an executable file’s contents keeps the executable bit intact. +- Edge case: mode-only source change (`0644 -> 0755` or `0755 -> 0644`) is reflected in the materialized target even when file bytes are unchanged. +- Integration: no-op rerun leaves copied executable files in place without stripping their mode. +- Integration: rollback-sensitive materialization path restores prior mode as well as prior content when a write path fails after mutation begins. + +**Verification:** +- Sync and writer regression tests assert executable copied files retain `stat.mode & 0o777` across initial publish and reruns. +- The shared helper remains the single owner of copied-file materialization behavior for both sync and writer paths. +- Existing Pi materialization and file-helper tests continue to pass without introducing broader permission churn. + +## System-Wide Impact + +- **Interaction graph:** Unit 1 affects the full Pi sync convergence pipeline: per-pass prompt/skill results, retry narrowing, warning accumulation, managed artifacts, and sync `nameMaps`. Unit 2 affects the shared Pi materialization helper used by both `syncPiSkills()` and `writePiBundle()`. +- **Error propagation:** Unit 1 must preserve current classification of unsupported and blocked artifacts while only changing how cross-type published/retryable outcomes are stabilized. Unit 2 must preserve existing atomic-write failure and rollback behavior. +- **State lifecycle risks:** Unit 1 touches transiently written artifacts that may later be demoted in the same run; stale cleanup and final state derivation must stay aligned. Unit 2 touches copied-file metadata and must not cause repeated churn or false no-op detection. +- **API surface parity:** Both sync and writer Pi paths must keep consistent materialization semantics. Same-run dependency behavior must stay consistent across prompt and skill publication rather than favoring one artifact type. 
+- **Integration coverage:** Unit tests alone are insufficient; the plan relies on end-to-end sync tests that assert final on-disk outputs, rerun narrowing, and managed-state contents together. +- **Unchanged invariants:** Persisted alias maps remain published-only. Pi status categories remain unchanged. Trust-boundary, symlink-safety, and legacy cleanup policies are out of scope for this fix. + +## Risks & Dependencies + +| Risk | Mitigation | +|------|------------| +| Fixing cross-type stabilization accidentally broadens reruns or persists transient aliases | Keep the change in the convergence layer, preserve existing status categories, and verify final `nameMaps`/artifacts are derived only from post-stabilization published subsets | +| Mode preservation fix causes no-op churn or misses incremental updates | Add explicit mode assertions for first publish, incremental content changes, and mode-only changes before settling on the narrowest helper change | +| Shared helper changes drift sync and writer behavior apart | Keep the file-mode fix at the shared materialization seam and verify both sync and writer regression suites | + +## Documentation / Operational Notes + +- No additional operational monitoring is required; this is a local correctness fix in conversion/sync behavior. +- After implementation, respond to the PR review threads with the concrete behavior change and the regression coverage added for each finding. 
+ +## Sources & References + +- PR review: https://github.com/EveryInc/compound-engineering-plugin/pull/288#pullrequestreview-4051612811 +- Related plan: `docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md` +- Related plan: `docs/plans/2026-04-01-003-fix-pi-capability-convergence-followups-plan.md` +- Related plan: `docs/plans/2026-04-01-002-fix-pi-transactional-parity-followups-plan.md` +- Related code: `src/sync/pi.ts` +- Related code: `src/utils/pi-skills.ts` +- Related tests: `tests/sync-pi.test.ts` +- Related tests: `tests/pi-writer.test.ts` diff --git a/docs/plans/2026-04-02-006-fix-pi-pr288-deep-review-followups-plan.md b/docs/plans/2026-04-02-006-fix-pi-pr288-deep-review-followups-plan.md new file mode 100644 index 000000000..c0c14522b --- /dev/null +++ b/docs/plans/2026-04-02-006-fix-pi-pr288-deep-review-followups-plan.md @@ -0,0 +1,375 @@ +--- +title: "fix: Address deep Pi review follow-up bugs on PR #288" +type: fix +status: completed +date: 2026-04-02 +pr: https://github.com/EveryInc/compound-engineering-plugin/pull/288 +sources: + - https://github.com/EveryInc/compound-engineering-plugin/pull/288#pullrequestreview-4052078233 +--- + +# fix: Address deep Pi review follow-up bugs on PR #288 + +## Overview + +Harden the Pi install/sync implementation so skill installation for the Claude Code plugin behaves correctly under same-run dependency rewrites, incremental skill updates, rollback, and permission preservation. This plan expands beyond the two validated Codex findings to cover the surrounding bug cluster that shares the same rewrite and transaction seams. + +Delivery strategy: land this as an ordered series of focused commits on the existing PR branch, grouped by bug cluster rather than as one undifferentiated patch. Unit 1 and Unit 2 can land independently, Units 3 and 5 are the transactional hardening stream, and Unit 4 can land independently so mode-parity fixes do not block rollback-safety work. 
+ +## Problem Frame + +The current Pi integration has drifted into two inconsistent models of reality. The rewrite pipeline in `src/utils/pi-skills.ts` is markdown-aware, punctuation-aware, and supports multiple live reference shapes, but same-run dependency extraction is still a narrow regex pass over raw text. In parallel, incremental copied-skill updates are locally rollback-safe inside the materialization helper, but the outer sync/install transaction often does not snapshot those skill directories before later steps mutate prompts, managed state, compat resources, or MCP config. The result is a set of bugs where Pi can publish an artifact with a dangling rewritten ref, incorrectly demote documentation-only artifacts, silently normalize unresolved first-party task agents to bogus leaf names, or leave the skill tree ahead of restored managed state after a failed run. + +## Requirements Trace + +- R1. Same-run dependency extraction must match actual Pi rewrite semantics for all supported live reference forms and must ignore literal/example text that is not rewritten. +- R2. Unresolved first-party task-agent refs must fail closed during Pi sync/install instead of silently falling back to normalized leaf names. +- R3. Sync and install must snapshot incrementally updated skill directories before later outer-transaction work so rollback restores the entire pre-run published state. +- R4. Pi materialization must preserve source file mode for both copied files and rewritten `SKILL.md` files, including mode-only changes and no-op reruns. +- R5. Managed manifest/verification commit ordering must remain rollback-safe and fail closed under write or snapshot failure. +- R6. Final managed state, published-only name maps, and on-disk Pi outputs must be derived only from post-stabilization published artifacts. 
+ +## Scope Boundaries + +- No redesign of Pi status classes, policy/trust models, or managed-state schema beyond what is required to fix rollback ordering and published-state correctness. +- No new long-lived alias domains; canonical logical names, same-run resolvable aliases, and emitted safe names remain separate concepts. +- No broader markdown parser replacement for all Pi transforms; the goal is parity with existing rewrite semantics, not a new document engine. +- No changes to non-Pi targets except where existing shared helper behavior already defines the correct contract. + +## Context & Research + +### Relevant Code and Patterns + +- `src/utils/pi-skills.ts` is the central authority for Pi rewrite behavior, same-run extraction, copied-skill materialization, rendered `SKILL.md` rewriting, fast-path metadata, and helper-local rollback. +- `src/sync/pi.ts` owns sync pass orchestration, same-run stabilization, rerun narrowing, publication snapshots, managed-state derivation, and rollback. +- `src/sync/pi-skills.ts` and `src/sync/commands.ts` are the sync entry points that currently attach `sameRunDependencies` and strict rewrite options. +- `src/targets/pi.ts` is the install/writer transaction owner and must stay behaviorally aligned with sync for copied-skill updates and managed-state rollback. +- `src/utils/pi-managed.ts` already stages manifest/verification writes together and is the right place to mirror when reasoning about commit ordering. +- `src/utils/files.ts` is the canonical atomic-write and mode-preservation contract; file mode is already part of equality/no-op semantics. +- `tests/sync-pi.test.ts`, `tests/pi-writer.test.ts`, `tests/files.test.ts`, and `tests/pi-converter.test.ts` already act as contract-level coverage for Pi rewrite semantics, transactional behavior, and permission handling. 
+ +### Institutional Learnings + +- `docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md`: preserve the distinction between canonical logical name, same-run resolvable alias, and emitted safe name. Final managed state must only include published artifacts. +- `docs/plans/2026-04-01-002-fix-pi-transactional-parity-followups-plan.md`: rollback, cleanup, and optimization must consume verified previous state and successfully committed current outputs only. +- `docs/plans/2026-03-20-003-fix-pi-symlink-boundary-and-materialization-safety-plan.md`: shared Pi materialization helpers are the contract owner; sync/install should not diverge semantically. +- `docs/plans/2026-03-20-004-fix-pi-routing-modes-and-cleanup-plan.md`: source mode preservation is part of correctness for materialized skill trees, including rewritten files. +- `docs/plans/2026-03-31-005-fix-pi-final-resolution-and-compat-seams-plan.md`: unresolved first-party refs must fail closed rather than silently retargeting to leaf-name fallbacks. +- `docs/solutions/codex-skill-prompt-entrypoints.md`: preserve canonical identity internally and only apply target-safe wrappers at the boundary. +- `docs/solutions/integrations/cross-platform-model-field-normalization-2026-03-29.md`: prefer one authoritative normalization source and omit/fail-closed when mapping certainty is not real. + +### External References + +- None. Local Pi patterns and prior plans are strong enough, and this work is primarily about internal contract consistency rather than framework semantics. + +## Key Technical Decisions + +- Make same-run dependency extraction derive from the same markdown-aware live-syntax model as rewrite, rather than extending the current raw regex scan. + Rationale: the rewrite contract already defines what Pi treats as live syntax versus literal/example text. A second looser parser guarantees more drift. 
+- Fix unresolved first-party task-agent rejection in the shared name-resolution layer rather than at individual call sites. + Rationale: `Task ...`, `Run subagent ...`, and `Run ce_subagent ...` all flow through the same task-agent normalization seam and must share one fail-closed policy. +- Treat incremental skill directory updates as outer-transaction mutations that require publication snapshots before later work continues. + Rationale: helper-local rollback only protects failures during the skill update itself; it cannot restore pre-run state after later prompt/manifest/shared-resource failures. +- Extend mode preservation to rewritten `SKILL.md` writes and rendered incremental ops, not just copied files. + Rationale: Pi treats rewritten and copied files as part of the same materialized skill tree. Permission drift on rewritten `SKILL.md` is still a correctness bug. +- Keep managed manifest/verification commit ordering fail closed even if that requires a characterization-first pass before choosing the narrowest code change. + Rationale: managed-state drift changes trust, cleanup authority, and runtime fallback behavior, so this boundary needs explicit protection. + +## Open Questions + +### Resolved During Planning + +- Should this update the existing `2026-04-02-005` plan or become a new plan? + Resolution: new plan. The existing plan covers the earlier two findings; this planning pass expands into a broader cross-cutting bug cluster and should be tracked separately. +- Should same-run extraction extend persisted alias state to make more lookups succeed? + Resolution: no. Extraction/rewrite parity should improve current-run dependency truth, but final managed state must remain published-only. +- Should rollback fixes live only inside `copySkillDirForPi()`? + Resolution: no. The helper-local rollback stays, but sync/install must also snapshot the skill directory before any outer transaction can later fail. 
+ +### Deferred to Implementation + +- Whether the cleanest extraction implementation is a shared tokenizer over markdown lines, a dependency-collector variant of `transformPiBodyContent()`, or a smaller refactor that reuses protected-line handling without duplicating rewrite behavior. + Why deferred: the planned behavior is clear, but the narrowest code shape depends on the current helper layout once editing begins. +- Whether managed-state ordering is best fixed by moving outer snapshots earlier, reusing `writePiManagedState()` staging more directly, or introducing a smaller wrapper for rollback capture. + Why deferred: this is an implementation-shape question that depends on how much existing staging can be reused without widening the transaction API. + +## High-Level Technical Design + +> *This illustrates the intended approach and is directional guidance for review, not implementation specification. The implementing agent should treat it as context, not code to reproduce.* + +```mermaid +flowchart TB + A[Source skill / command content] --> B[Markdown-aware live-syntax scan] + A --> C[Markdown-aware rewrite] + B --> D[Same-run dependency set] + C --> E[Emitted Pi content] + D --> F[Prompt/skill stabilization fixpoint] + F --> G[Published artifacts only] + G --> H[Managed name maps / cleanup inputs] + + I[Incremental skill mutation] --> J[Outer publication snapshot] + J --> K[Helper-local mutation + local rollback] + K --> L[Prompts / AGENTS / MCP / managed-state writes] + L --> M[Commit success] + L --> N[Outer rollback restores pre-run tree] +``` + +The key design constraint is that `B` and `C` must agree on what counts as a live dependency, and `I -> N` must behave transactionally across both sync and install even when the skill update itself succeeded earlier. 
+ +## Implementation Units + +```mermaid +flowchart TB + U1[Unit 1 +Dependency extraction parity] --> U2[Unit 2 +Task-agent rejection completeness] + U1 --> U3[Unit 3 +Outer rollback for incremental skills] + U3 --> U5[Unit 5 +Managed-state commit ordering] + U4[Unit 4 +Rendered SKILL.md mode parity] --> U5 +``` + +- [x] **Unit 1: Make same-run dependency extraction match live Pi rewrite semantics** + +**Goal:** Replace the current raw regex dependency scan with a markdown-aware live-syntax scan that matches how Pi actually rewrites content. + +**Requirements:** R1, R6 + +**Dependencies:** None + +**Files:** +- Modify: `src/utils/pi-skills.ts` +- Modify if needed: `src/sync/pi.ts` +- Test: `tests/sync-pi.test.ts` +- Test: `tests/pi-converter.test.ts` + +**Approach:** +- Make same-run extraction reuse the same concept of “live syntax” versus literal/example text already enforced by the rewrite path. +- Support the full set of live same-run shapes the rewriter accepts: `Task ...`, `Run subagent ...`, `Run ce_subagent ...`, `/skill:...`, `/prompt:...`, `/prompts:...`, and unqualified as well as `claude-home:`-qualified refs. +- Normalize extraction token boundaries the same way slash-command rewriting already does so trailing punctuation is removed consistently. +- Keep extraction concerned with current-run dependency truth only; do not mutate persisted alias domains or expand managed `nameMaps`. + +**Execution note:** Add characterization coverage first for punctuation, code/example text, and unqualified/structured ref shapes before changing the extractor. 
+ +**Patterns to follow:** +- `src/utils/pi-skills.ts` `transformPiBodyContent()` and `transformPiMarkdownLine()` +- `tests/pi-converter.test.ts` accepted rewrite forms for `Task` and `Run subagent` +- `docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md` + +**Test scenarios:** +- Happy path: `/skill:claude-home:ce:plan,` and `/prompt:claude-home:plan-review.` demote/publish based on the sibling’s real source name, not the punctuated token. +- Happy path: `/prompts:claude-home:plan-review,` participates in same-run dependency tracking with the same punctuation trimming and sibling-name resolution rules as rewrite. +- Happy path: unqualified `Task ce:plan(feature_description)` participates in same-run dependency tracking and demotes when the sibling skill is unpublished. +- Happy path: `Run subagent with agent="claude-home:ce:plan"` and `Run ce_subagent with agent="claude-home:ce:plan"` participate in same-run dependency tracking. +- Edge case: inline code, fenced code, indented code, and blockquote examples containing `claude-home:` refs do not create same-run dependencies. +- Error path: dependency on a sibling that becomes `unsupported-final` or `blocked-by-policy` does not leave the dependent artifact published with a dangling rewritten ref. +- Integration: a mixed graph with prompts and skills reaches the same final published set in both narrow and full rerun modes after the extractor change. + +**Verification:** +- Dependency extraction and emitted rewrite behavior agree for all supported live ref forms. +- Documentation-only example text no longer causes retry/demotion. +- Final published outputs and managed state omit dependents of unpublished siblings. + +- [x] **Unit 2: Complete fail-closed handling for unresolved first-party task-agent refs** + +**Goal:** Ensure unresolved first-party `Task ...` and structured subagent refs are rejected or skipped rather than silently falling back to normalized leaf names. 
+ +**Requirements:** R2, R6 + +**Dependencies:** None + +**Files:** +- Modify: `src/utils/pi-skills.ts` +- Verify/adjust if needed: `src/sync/commands.ts` +- Verify/adjust if needed: `src/sync/pi-skills.ts` +- Test: `tests/sync-pi.test.ts` +- Test: `tests/pi-converter.test.ts` + +**Approach:** +- Tighten the shared task-agent normalization path so first-party qualified refs (`claude-home:*`, `compound-engineering:*`) fail closed whenever no valid mapping exists, not only in leaf-collision branches. +- Preserve the current distinction between first-party and foreign-qualified handling so foreign refs can still follow the existing preserve/reject behavior. +- Keep the enforcement at the shared normalization seam so `Task ...`, `Run subagent ...`, and `Run ce_subagent ...` stay consistent. + +**Execution note:** Start with failing sync coverage for unresolved first-party `Task`, `Run subagent`, and `Run ce_subagent` forms. + +**Patterns to follow:** +- `src/utils/pi-skills.ts` `normalizePiPromptReferenceName()` fail-closed first-party handling +- `docs/plans/2026-03-31-005-fix-pi-final-resolution-and-compat-seams-plan.md` + +**Test scenarios:** +- Error path: `Task claude-home:missing(feature_description)` is rejected/skipped instead of publishing `agent="missing"`. +- Error path: `Run subagent with agent="claude-home:missing"` is rejected/skipped instead of silently normalizing to a leaf agent. +- Error path: `Run ce_subagent with agent="compound-engineering:missing"` is rejected/skipped. +- Happy path: exact first-party mapping still rewrites successfully when a trusted same-run or installed mapping exists. +- Error path: a dependent artifact using an unresolved first-party task-agent ref is omitted from final published artifacts and managed state. +- Integration: foreign qualified refs retain current preserve/reject behavior and are not accidentally reclassified as first-party failures. 
+ +**Verification:** +- No published Pi prompt/skill contains silently normalized bogus leaf agents for unresolved first-party refs. +- Existing foreign-qualified behavior remains unchanged. + +- [x] **Unit 3: Restore outer rollback coverage for incrementally updated skill directories** + +**Goal:** Make sync and install rollback restore the pre-run skill tree even when an incremental skill mutation succeeded before a later outer transaction failure. + +**Requirements:** R3, R6 + +**Dependencies:** Unit 1 + +**Files:** +- Modify: `src/sync/pi.ts` +- Modify: `src/targets/pi.ts` +- Modify if needed: `src/utils/pi-skills.ts` +- Test: `tests/sync-pi.test.ts` +- Test: `tests/pi-writer.test.ts` + +**Approach:** +- Snapshot skill directories before any incremental copied-skill mutation that the outer publication transaction may need to roll back later. +- Preserve the existing helper-local rollback inside `copySkillDirForPi()` for failures during the skill update itself. +- Align sync and install so copied-skill mutation semantics match prompts/shared-file semantics at the outer transaction boundary. +- Include the fast-path/fingerprint side effects in the rollback model so a restored tree does not keep stale “new” metadata. + +**Technical design:** *(directional guidance, not implementation specification)* +The outer owner should treat `mode === "incremental"` as a real publish mutation whenever later steps can still fail. Local helper rollback stays in place, but outer rollback must also remember the pre-run directory image before the helper returns successfully. 
+ +**Patterns to follow:** +- Prompt/shared-file snapshot-before-mutate pattern in `src/sync/pi.ts` +- Copied-skill parity expectations in `src/targets/pi.ts` +- `docs/plans/2026-04-01-002-fix-pi-transactional-parity-followups-plan.md` + +**Test scenarios:** +- Integration: incremental sync skill update succeeds, then a later prompt/shared-resource/managed-state failure occurs; the skill tree is restored to its pre-run contents. +- Integration: incremental install copied-skill update succeeds, then a later install failure occurs; the copied skill directory is restored. +- Edge case: rollback restores added files, removed files, nested changes, and mode-only changes in the skill tree. +- Integration: fast-path/fingerprint state does not remain advanced after outer rollback. +- Error path: helper-local rollback still works if the incremental skill update itself fails before outer transaction work proceeds. + +**Verification:** +- Failed sync/install runs leave the skill tree, managed state, and shared resources aligned to the same pre-run snapshot. +- Incremental updates no longer create partial commits that survive a later outer failure. + +- [x] **Unit 4: Preserve source mode for rewritten `SKILL.md` paths** + +**Goal:** Extend mode preservation to rendered `SKILL.md` writes and rendered incremental ops so rewritten files stay in parity with copied-file mode guarantees. + +**Requirements:** R4 + +**Dependencies:** None + +**Files:** +- Modify: `src/utils/pi-skills.ts` +- Modify if needed: `src/utils/files.ts` +- Test: `tests/files.test.ts` +- Test: `tests/pi-writer.test.ts` +- Test: `tests/sync-pi.test.ts` + +**Approach:** +- Thread source mode through the rendered `SKILL.md` rewrite path, not only copied-file writes. +- Make rendered no-op/incremental comparisons treat mode as part of equality so mode-only source changes are not frozen behind content-only checks. 
+- Keep replace-path and incremental-path semantics aligned so rewritten `SKILL.md` and copied assets obey the same materialization contract. + +**Execution note:** Characterization-first on rendered `SKILL.md` mode behavior before changing the rewrite path. + +**Patterns to follow:** +- `src/utils/files.ts` mode-aware atomic write helpers +- `tests/files.test.ts` `stat.mode & 0o777` assertions +- `docs/plans/2026-03-20-004-fix-pi-routing-modes-and-cleanup-plan.md` + +**Test scenarios:** +- Happy path: rewritten `SKILL.md` preserves a non-default source mode on initial materialization. +- Edge case: mode-only source change on rewritten `SKILL.md` updates the target mode even when rendered bytes are unchanged. +- Integration: no-op rerun does not freeze a wrong rendered-file mode in the fast-path record. +- Integration: replace-path and incremental-path rewrites preserve the same final mode for `SKILL.md`. +- Error path: rollback after a later outer failure restores prior `SKILL.md` mode as well as prior content. + +**Verification:** +- Both copied assets and rewritten `SKILL.md` files preserve source mode across initial publish, incremental update, and no-op reruns. +- Rendered-file mode drift is no longer possible without tests failing. + +- [x] **Unit 5: Tighten managed-state commit ordering and rollback safety** + +**Goal:** Ensure manifest/verification writes and rollback snapshots preserve a fail-closed managed-state boundary even if write or snapshot capture fails. + +**Requirements:** R5, R6 + +**Dependencies:** Unit 3 + +**Files:** +- Modify: `src/sync/pi.ts` +- Modify: `src/targets/pi.ts` +- Verify/adjust if needed: `src/utils/pi-managed.ts` +- Test: `tests/sync-pi.test.ts` +- Test: `tests/pi-writer.test.ts` + +**Approach:** +- Revisit the point at which manifest/verification rollback state is captured relative to `writePiManagedState()` so rollback can restore the prior verified pair even if snapshot capture itself fails. 
+- Keep the managed-state boundary fail closed: partial writes must not grant cleanup/trust authority over artifacts that were rolled back. +- Cover both non-empty updates and empty-state deletion transitions, since they have different failure surfaces. +- This unit should not wait on rendered-file mode parity unless implementation discovers a concrete shared-code coupling; rollback safety is the higher-risk integrity boundary. + +**Execution note:** Start with fault-injection characterization around verification-write failure and snapshot-capture-after-write failure before selecting the minimal code change. + +**Patterns to follow:** +- `src/utils/pi-managed.ts` staged manifest/verification write behavior +- `docs/plans/2026-03-30-004-fix-pi-publication-runtime-trust-plan.md` +- existing manifest rollback tests in `tests/pi-writer.test.ts` + +**Test scenarios:** +- Error path: verification write fails after manifest write succeeds; rollback restores the prior verified manifest + verification pair. +- Error path: snapshot capture fails immediately after managed-state write; the newly written managed files do not survive while other artifacts roll back. +- Edge case: transition from non-empty managed state to empty state restores both manifest and verification if deletion fails partway through. +- Integration: failed sync/install run does not leave managed files claiming ownership of artifacts/shared resources that were restored to the old state. + +**Verification:** +- Managed manifest/verification remain aligned with the actual on-disk Pi tree after any injected failure in the commit phase. +- Trust, cleanup authority, and runtime fallback behavior remain fail closed under partial failure. + +## System-Wide Impact + +- **Interaction graph:** This work touches the end-to-end Pi flow from source markdown -> rewrite/dependency extraction -> same-run stabilization -> published artifacts -> managed state -> runtime trust/cleanup. 
It also spans both sync (`src/sync/pi.ts`) and install (`src/targets/pi.ts`) mutation owners. +- **Error propagation:** First-party unresolved refs should fail closed at normalization time; same-run dependency misses should demote before final state derivation; outer transaction failures must restore skill trees, prompts, shared resources, and managed state together. +- **State lifecycle risks:** Partial skill mutation, stale fast-path metadata, published-then-demoted artifacts, mode-only drift, and managed-state files surviving failed rollback are the major lifecycle risks. +- **API surface parity:** `Task ...`, `Run subagent ...`, `Run ce_subagent ...`, `/skill:...`, `/prompt:...`, and `/prompts:...` must behave consistently between extraction and rewrite. Sync and install must share the same materialization and rollback contract. +- **Integration coverage:** The critical bugs live across layers; tests must assert final on-disk outputs, retry narrowing, rollback restoration, and managed-state contents together rather than only unit-level helper behavior. +- **Unchanged invariants:** Persisted alias maps stay published-only. Canonical internal identity remains separate from emitted safe names. Foreign-qualified handling should not silently widen. Trust/symlink policy remains out of scope except where commit ordering affects fail-closed behavior. 
+ +## Risks & Dependencies + +| Risk | Mitigation | +|------|------------| +| Dependency extraction parity accidentally changes rewrite behavior instead of just matching it | Reuse the existing markdown-aware rewrite model as the authority and add characterization tests before changing extraction | +| Rollback fixes introduce excessive snapshot churn or regress no-op fast paths | Snapshot only when a real outer mutation is about to occur, and verify no-op/install/sync snapshot expectations stay green | +| Managed-state ordering fix widens the transaction API unnecessarily | Start with fault-injection characterization and prefer the smallest change that preserves the existing staged-write contract | +| Task-agent fail-closed changes accidentally break foreign-qualified behavior | Add explicit regression coverage for first-party vs foreign-qualified cases before tightening shared normalization | + +## Alternative Approaches Considered + +- Keep patching `collectPiSameRunDependencies()` with more regexes. + Rejected: it would continue diverging from the markdown-aware rewrite path and would likely miss new supported syntaxes again. +- Expand persisted `nameMaps` to encode more same-run state. + Rejected: that would blur the boundary between transient current-run resolvability and published managed-state truth. +- Rely only on helper-local rollback in `copySkillDirForPi()`. + Rejected: it cannot restore pre-run state after later outer transaction failures. + +## Documentation / Operational Notes + +- No production rollout or monitoring changes are expected; this is a local correctness hardening of the Pi target. +- After implementation, the PR review replies should group fixes by bug cluster: extraction parity, fail-closed ref handling, rollback parity, mode preservation, and managed-state ordering. 
+ +## Sources & References + +- Codex review: https://github.com/EveryInc/compound-engineering-plugin/pull/288#pullrequestreview-4052078233 +- Related plan: `docs/plans/2026-04-02-005-fix-pi-pr288-review-followup-plan.md` +- Related plan: `docs/plans/2026-04-02-004-fix-pi-review-bug-batch-plan.md` +- Related plan: `docs/plans/2026-04-01-002-fix-pi-transactional-parity-followups-plan.md` +- Related plan: `docs/plans/2026-03-20-003-fix-pi-symlink-boundary-and-materialization-safety-plan.md` +- Related code: `src/utils/pi-skills.ts` +- Related code: `src/sync/pi.ts` +- Related code: `src/targets/pi.ts` +- Related code: `src/utils/pi-managed.ts` +- Related tests: `tests/sync-pi.test.ts` +- Related tests: `tests/pi-writer.test.ts` +- Related tests: `tests/files.test.ts` +- Related tests: `tests/pi-converter.test.ts` diff --git a/src/sync/pi.ts b/src/sync/pi.ts index 016770472..e40e64727 100644 --- a/src/sync/pi.ts +++ b/src/sync/pi.ts @@ -176,7 +176,6 @@ export async function syncToPi( const skillResults = await syncPiSkills(activeSkills, layout.skillsDir, activeSkillMap, skillNameMaps, { onBeforeMutate: async (_skillName, targetPath, mode) => { - if (mode === "incremental") return await rememberSyncPublicationSnapshot(publicationSnapshots, targetPath) }, }) @@ -191,8 +190,17 @@ export async function syncToPi( }, ) - const stableSkillResults = stabilizeSameRunQualifiedDependencies(skillResults, activeSkillMap, activePromptMap) - const stablePromptResults = stabilizeSameRunQualifiedDependencies(promptResults, activeSkillMap, activePromptMap) + for (const artifact of promptResults.filter(isPublishedPromptResult).map((result) => result.artifact)) { + currentRunArtifacts.set(`${artifact.kind}:${artifact.relativePath}`, artifact) + } + for (const skill of skillResults.filter(isPublishedSkillResult)) { + const artifact = createManagedArtifact(layout, "synced-skill", skill.sourceName, skill.emittedName) + currentRunArtifacts.set(`${artifact.kind}:${artifact.relativePath}`, 
artifact) + } + + const stableResults = stabilizeSameRunQualifiedDependencies(skillResults, promptResults, activeSkillMap, activePromptMap) + const stableSkillResults = stableResults.skills + const stablePromptResults = stableResults.prompts for (const result of stableSkillResults) { aggregatedSkillResults.set(result.sourceName, result) @@ -422,10 +430,6 @@ export async function syncToPi( install: canUseVerifiedCleanup(trustInfo, "install"), sync: true, }) - if (didWriteManagedState) { - await rememberSyncPublicationSnapshot(publicationSnapshots, layout.managedManifestPath) - await rememberSyncPublicationSnapshot(publicationSnapshots, layout.verificationPath) - } } catch (error) { await restoreSyncPublicationSnapshots(publicationSnapshots) throw error @@ -606,39 +610,86 @@ function isPublishedSkillResult(result: SyncPiSkillResult): result is SyncPiSkil return result.status === "published" } -function stabilizeSameRunQualifiedDependencies, + activePromptMap: Record, +): { + skills: SyncPiSkillResult[] + prompts: SyncPiCommandResult[] +} { + let stableSkills = skillResults + let stablePrompts = promptResults + + while (true) { + const publishedSkills = new Set(stableSkills.filter((result) => result.status === "published").map((result) => result.sourceName)) + const publishedPrompts = new Set(stablePrompts.filter((result) => result.status === "published").map((result) => result.sourceName)) + const skillStatuses = new Map(stableSkills.map((result) => [result.sourceName, result.status])) + const promptStatuses = new Map(stablePrompts.map((result) => [result.sourceName, result.status])) + const nextSkills = demoteBlockedSameRunDependencies(stableSkills, publishedSkills, publishedPrompts, activeSkillMap, activePromptMap, skillStatuses, promptStatuses) + const nextPrompts = demoteBlockedSameRunDependencies(stablePrompts, publishedSkills, publishedPrompts, activeSkillMap, activePromptMap, skillStatuses, promptStatuses) + + if (sameResultStatuses(stableSkills, nextSkills) 
&& sameResultStatuses(stablePrompts, nextPrompts)) { + return { skills: nextSkills, prompts: nextPrompts } + } + + stableSkills = nextSkills + stablePrompts = nextPrompts + } +} + +function demoteBlockedSameRunDependencies( results: T[], + publishedSkills: Set, + publishedPrompts: Set, activeSkillMap: Record, activePromptMap: Record, + skillStatuses: Map, + promptStatuses: Map, ): T[] { - const publishedSkills = new Set(results.filter((result) => result.status === "published").map((result) => result.sourceName)) - const publishedPrompts = new Set(results.filter((result) => result.status === "published").map((result) => result.sourceName)) - return results.map((result) => { if (result.status !== "published") return result - const blockedSkillDependency = (result.sameRunDependencies?.skills ?? []).some((dependency) => - Boolean(activeSkillMap[dependency]) && !publishedSkills.has(dependency)) - const blockedPromptDependency = (result.sameRunDependencies?.prompts ?? []).some((dependency) => - Boolean(activePromptMap[dependency]) && !publishedPrompts.has(dependency)) + const blockedSkillStatuses = (result.sameRunDependencies?.skills ?? []) + .filter((dependency) => Boolean(activeSkillMap[dependency]) && !publishedSkills.has(dependency)) + .map((dependency) => skillStatuses.get(dependency) ?? "retryable") + const blockedPromptStatuses = (result.sameRunDependencies?.prompts ?? []) + .filter((dependency) => Boolean(activePromptMap[dependency]) && !publishedPrompts.has(dependency)) + .map((dependency) => promptStatuses.get(dependency) ?? "retryable") + const blockedStatuses = [...blockedSkillStatuses, ...blockedPromptStatuses] - if (!blockedSkillDependency && !blockedPromptDependency) { + if (blockedStatuses.length === 0) { return result } + const nextStatus = blockedStatuses.some((status) => status === "blocked-by-policy") + ? "blocked-by-policy" + : blockedStatuses.some((status) => status === "unsupported-final") + ? 
"unsupported-final" + : "retryable" + return { ...result, - status: "retryable", + status: nextStatus, warning: undefined, } }) } +function sameResultStatuses(left: T[], right: T[]): boolean { + if (left.length !== right.length) return false + + const leftStatuses = left.map((result) => `${result.sourceName}:${result.emittedName}:${result.status}`).sort() + const rightStatuses = right.map((result) => `${result.sourceName}:${result.emittedName}:${result.status}`).sort() + return JSON.stringify(leftStatuses) === JSON.stringify(rightStatuses) +} + async function collectLegacySkillDirectoryCandidates( layout: ReturnType, nextSyncArtifacts: PiManagedArtifact[], diff --git a/src/targets/pi.ts b/src/targets/pi.ts index 5563f8911..6b2f842d3 100644 --- a/src/targets/pi.ts +++ b/src/targets/pi.ts @@ -210,12 +210,14 @@ export async function writePiBundle(outputRoot: string, bundle: PiBundle): Promi skill.name, bundle.nameMaps, { trustedRoot: skill.sourceDir }, - undefined, + { + preserveUnknownQualifiedRefs: true, + rejectUnknownQualifiedTaskRefs: true, + rejectUnresolvedFirstPartyQualifiedRefs: true, + }, { onBeforeMutate: async (mode) => { - if (mode === "replace" || previousArtifact) { - await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetDir) - } + await rememberPiManagedPublicationSnapshot(publicationSnapshots, targetDir) if (previousArtifact) { await removeSkillDirectoryIfExists(targetDir) } @@ -346,25 +348,10 @@ export async function writePiBundle(outputRoot: string, bundle: PiBundle): Promi } } - if (shouldWritePiManagedState(nextState)) { - const didWriteManagedState = await writePiManagedState(paths, nextState, { - install: true, - sync: canUseVerifiedCleanup(trustInfo, "sync"), - }) - if (didWriteManagedState) { - await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.managedManifestPath) - await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.verificationPath) - } - } else { - if (await 
pathExists(paths.managedManifestPath)) { - await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.managedManifestPath) - } - if (await pathExists(paths.verificationPath)) { - await rememberPiManagedPublicationSnapshot(publicationSnapshots, paths.verificationPath) - } - await removeFileIfExists(paths.managedManifestPath) - await removeFileIfExists(paths.verificationPath) - } + await writePiManagedState(paths, nextState, { + install: true, + sync: canUseVerifiedCleanup(trustInfo, "sync"), + }) } catch (error) { await restorePiManagedPublicationSnapshots(publicationSnapshots) throw error diff --git a/src/utils/files.ts b/src/utils/files.ts index 05f1cf222..a1bc11745 100644 --- a/src/utils/files.ts +++ b/src/utils/files.ts @@ -136,7 +136,7 @@ export async function readJson(filePath: string): Promise { } export async function writeText(filePath: string, content: string): Promise { - await ensureManagedParentDir(filePath) + await ensureDir(path.dirname(filePath)) await fs.writeFile(filePath, content, "utf8") } @@ -269,8 +269,10 @@ export async function writeTextAtomicIfChanged(options: { const { filePath, content, mode, skipFailureHook = false, existingContent } = options await assertNoSymlinkAncestors(filePath) await assertNoSymlinkTarget(filePath) + const existingStats = await fs.stat(filePath).catch(() => null) const existing = existingContent === undefined ? 
await readText(filePath).catch(() => null) : existingContent - if (existing === content) { + const modeMatches = mode === undefined || !existingStats || (existingStats.mode & 0o777) === mode + if (existing === content && modeMatches) { return false } @@ -312,8 +314,10 @@ export async function writeFileAtomicIfChanged(options: { const { filePath, content, mode, skipFailureHook = false } = options await assertNoSymlinkAncestors(filePath) await assertNoSymlinkTarget(filePath) - const existing = await fs.readFile(filePath).catch(() => null) - if (existing && existing.equals(content)) { + const existingStats = await fs.stat(filePath).catch(() => null) + const existing = existingStats ? await fs.readFile(filePath) : null + const modeMatches = mode === undefined || !existingStats || (existingStats.mode & 0o777) === mode + if (existing && existing.equals(content) && modeMatches) { return false } @@ -646,14 +650,20 @@ export async function copyDir(sourceDir: string, targetDir: string): Promise string, + transformAllMarkdown?: boolean, ): Promise { await ensureDir(targetDir) const entries = await fs.readdir(sourceDir, { withFileTypes: true }) @@ -663,9 +673,12 @@ export async function copySkillDir( const targetPath = path.join(targetDir, entry.name) if (entry.isDirectory()) { - await copySkillDir(sourcePath, targetPath, transformSkillContent) + await copySkillDir(sourcePath, targetPath, transformSkillContent, transformAllMarkdown) } else if (entry.isFile()) { - if (entry.name === "SKILL.md" && transformSkillContent) { + const shouldTransform = transformSkillContent && ( + entry.name === "SKILL.md" || (transformAllMarkdown && entry.name.endsWith(".md")) + ) + if (shouldTransform) { const content = await readText(sourcePath) await writeText(targetPath, transformSkillContent(content)) } else { diff --git a/src/utils/pi-skills.ts b/src/utils/pi-skills.ts index 001f31e82..dacf952a7 100644 --- a/src/utils/pi-skills.ts +++ b/src/utils/pi-skills.ts @@ -108,18 +108,75 @@ export 
function collectPiSameRunDependencies(content: string): { skills: string[] prompts: string[] } { + const lineBreak = content.includes("\r\n") ? "\r\n" : "\n" + const lines = String(content || "").split(lineBreak) const skills = new Set() const prompts = new Set() - const text = String(content || "") + let activeFence: { char: "`" | "~"; length: number } | null = null + let inIndentedCodeBlock = false + let previousBlankLine = true + let inBlockquote = false - for (const match of text.matchAll(/\/skill:claude-home:([^\s)]+)/g)) { - if (match[1]) skills.add(match[1]) - } - for (const match of text.matchAll(/Task\s+claude-home:([^\s(]+)\s*\(/g)) { - if (match[1]) skills.add(match[1]) - } - for (const match of text.matchAll(/\/(?:prompt|prompts):claude-home:([^\s)]+)/g)) { - if (match[1]) prompts.add(match[1]) + for (const line of lines) { + const fence = readMarkdownFence(line) + const blankLine = line.trim().length === 0 + + if (activeFence) { + if (fence && fence.char === activeFence.char && fence.length >= activeFence.length) { + activeFence = null + } + continue + } + + if (inIndentedCodeBlock) { + if (blankLine) { + previousBlankLine = true + continue + } + + if (isIndentedCodeBlockLine(line)) { + previousBlankLine = false + continue + } + + inIndentedCodeBlock = false + } + + if (fence) { + activeFence = fence + previousBlankLine = false + continue + } + + if (inBlockquote) { + if (blankLine) { + inBlockquote = false + previousBlankLine = true + continue + } + + if (/^\s*>/.test(line) || !isMarkdownBlockStarter(line)) { + previousBlankLine = false + continue + } + + inBlockquote = false + } + + if (/^\s*>/.test(line)) { + inBlockquote = true + previousBlankLine = false + continue + } + + if (previousBlankLine && isIndentedCodeBlockLine(line) && !isIndentedTaskBulletLine(line)) { + inIndentedCodeBlock = true + previousBlankLine = false + continue + } + + collectPiMarkdownLineDependencies(line, skills, prompts) + previousBlankLine = blankLine } return { @@ -466,10 
+523,12 @@ async function copyDirForPiMaterialization( } if (resolvedEntry.kind === "file") { + const sourceStats = await fs.lstat(materializedSourcePath) await fs.mkdir(path.dirname(targetPath), { recursive: true }) await writeFileAtomicIfChanged({ filePath: targetPath, content: await fs.readFile(materializedSourcePath), + mode: sourceStats.mode & 0o777, }) continue } @@ -491,9 +550,11 @@ export async function rewriteSkillFileForPi( const raw = await readText(skillPath) const updated = renderPiSkillContent(raw, targetName, nameMaps, skillPath, options) + const sourceStats = await fs.stat(skillPath) + const sourceMode = sourceStats.mode & 0o777 if (updated !== raw) { - await writeTextAtomicIfChanged({ filePath: skillPath, content: updated }) + await writeTextAtomicIfChanged({ filePath: skillPath, content: updated, mode: sourceMode }) } } @@ -530,8 +591,14 @@ async function materializedSkillFileMatches( nameMaps?: PiNameMaps, options?: PiTransformOptions, ): Promise { - const [sourceRaw, targetRaw] = await Promise.all([readText(sourcePath), readText(targetPath)]) + const [sourceRaw, targetRaw, sourceStats, targetStats] = await Promise.all([ + readText(sourcePath), + readText(targetPath), + fs.stat(sourcePath), + fs.stat(targetPath), + ]) return renderPiSkillContent(sourceRaw, targetName, nameMaps, sourcePath, options) === targetRaw + && (sourceStats.mode & 0o777) === (targetStats.mode & 0o777) } async function fileContentsMatch(sourcePath: string, targetPath: string): Promise { @@ -567,6 +634,21 @@ function splitRawAtFrontmatterEnd(raw: string): { frontmatter: string; body: str } function normalizePiTaskAgentName(value: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { + const trimmed = value.trim() + if (options?.rejectUnresolvedFirstPartyQualifiedRefs && trimmed.startsWith("claude-home:")) { + const leafName = trimmed.split(":").filter(Boolean).pop() ?? 
trimmed + const hasMappedTarget = Boolean( + nameMaps?.agents?.[trimmed] + || nameMaps?.skills?.[trimmed] + || nameMaps?.agents?.[leafName] + || nameMaps?.skills?.[leafName], + ) + + if (!hasMappedTarget) { + throw new Error(`Unsupported unresolved first-party qualified ref for Pi sync: ${trimmed}`) + } + } + return resolvePiMappedName(value, { primary: nameMaps?.agents, secondary: nameMaps?.skills, @@ -588,8 +670,7 @@ function normalizePiSkillReferenceName(value: string, nameMaps?: PiNameMaps, opt function normalizePiPromptReferenceName(value: string, nameMaps?: PiNameMaps, options?: PiTransformOptions): string { const trimmed = value.trim() - const rootNamespace = trimmed.split(":").filter(Boolean)[0] ?? "" - const isFirstPartyQualified = trimmed.includes(":") && ["compound-engineering", "claude-home"].includes(rootNamespace) + const isFirstPartyQualified = isFirstPartyQualifiedPiName(trimmed) const leafName = trimmed.split(":").filter(Boolean).pop() ?? trimmed if (isFirstPartyQualified && options?.rejectUnresolvedFirstPartyQualifiedRefs) { if (!nameMaps?.prompts?.[trimmed] && !nameMaps?.prompts?.[leafName]) { @@ -662,6 +743,76 @@ function resolvePiMappedName( : normalizePiSkillName(leafName) } +function collectPiMarkdownLineDependencies(line: string, skills: Set, prompts: Set): void { + const literals: string[] = [] + const protectedLine = line.replace(/(`+)([^`]*?)\1/g, (match) => { + const index = literals.push(match) - 1 + return `@@PI_LITERAL_${index}@@` + }) + + const taskPattern = /^(\s*(?:(?:[-*])\s+|\d+\.\s+)?)Task\s+([a-z][a-z0-9:_-]*)\(([^)]*)\)/ + const taskMatch = protectedLine.match(taskPattern) + if (taskMatch?.[2]) { + const skillDependency = extractPiSameRunSkillDependency(taskMatch[2]) + if (skillDependency) skills.add(skillDependency) + } + + for (const match of protectedLine.matchAll(/\bRun (?:subagent|ce_subagent) with agent="([^"]+)"/g)) { + const skillDependency = extractPiSameRunSkillDependency(match[1] ?? 
"") + if (skillDependency) skills.add(skillDependency) + } + + const slashCommandPattern = /(?" function appendCompatibilityNoteIfNeeded(body: string): string { @@ -695,6 +846,7 @@ type PiMaterializedTreeNode = | { kind: "file" sourcePath: string + mode: number renderedContent?: string } @@ -708,6 +860,7 @@ type PiMaterializedMetadataNode = { kind: "directory" | "file" name: string sourcePath: string + mode?: number metadataSignature?: string children?: PiMaterializedMetadataNode[] } @@ -736,6 +889,7 @@ type PiIncrementalOp = type: "writeFile" relativePath: string sourcePath?: string + mode?: number renderedContent?: string } | { @@ -753,7 +907,7 @@ type PiMutationSnapshot = { } type PiSkillFastPathRecord = { - version: 3 + version: 4 policyFingerprint: string renderSignature: string sourceMetadataSignature: string @@ -892,6 +1046,7 @@ async function analyzePiMaterializedTree( node.children.set(child.name, { kind: "file", sourcePath: child.sourcePath, + mode: child.mode ?? 0o644, renderedContent, }) fingerprintHash.update(renderedContent) @@ -901,6 +1056,7 @@ async function analyzePiMaterializedTree( node.children.set(child.name, { kind: "file", sourcePath: child.sourcePath, + mode: child.mode ?? 
0o644, }) fingerprintHash.update(await fs.readFile(child.sourcePath)) } @@ -948,6 +1104,8 @@ async function buildPiMaterializedTreeMetadataSummary( hash.update(":") hash.update(String(stats.mtimeMs)) hash.update("") + hash.update(String(stats.mode & 0o777)) + hash.update("") if (resolvedEntry.kind === "directory") { const childSummary = await buildPiMaterializedTreeMetadataSummary(resolvedEntry.sourcePath, options, activeRealDirs) @@ -966,6 +1124,7 @@ async function buildPiMaterializedTreeMetadataSummary( kind: "file", name: resolvedEntry.name, sourcePath: resolvedEntry.sourcePath, + mode: stats.mode & 0o777, }) } @@ -1014,6 +1173,8 @@ async function buildPiTargetTreeNode(targetDir: string, hash: ReturnType, targetPath: string): Promise { + const targetStats = await fs.stat(targetPath) + if ((targetStats.mode & 0o777) !== node.mode) { + return false + } + if (node.renderedContent !== undefined) { const targetRaw = await readText(targetPath) return node.renderedContent === targetRaw @@ -1154,7 +1322,7 @@ async function readPiSkillFastPathRecord(targetDir: string): Promise { const recordPath = resolvePiSkillFastPathRecordPath(targetDir) const record: PiSkillFastPathRecord = { - version: 3, + version: 4, policyFingerprint: getPiPolicyFingerprint(), renderSignature, sourceMetadataSignature, @@ -1229,7 +1397,7 @@ async function applyPiIncrementalOp(targetDir: string, op: PiIncrementalOp): Pro await ensureSafePiMutationTarget(targetPath, "file") if (op.renderedContent !== undefined) { - await writeTextAtomicIfChanged({ filePath: targetPath, content: op.renderedContent }) + await writeTextAtomicIfChanged({ filePath: targetPath, content: op.renderedContent, mode: op.mode }) return } @@ -1238,7 +1406,7 @@ async function applyPiIncrementalOp(targetDir: string, op: PiIncrementalOp): Pro } const sourceBuffer = await fs.readFile(op.sourcePath) - await writeFileAtomicIfChanged({ filePath: targetPath, content: sourceBuffer }) + await writeFileAtomicIfChanged({ filePath: 
targetPath, content: sourceBuffer, mode: op.mode }) } async function ensureSafePiMutationTarget(targetPath: string, expected: "missing" | "file"): Promise { @@ -1460,13 +1628,16 @@ function transformPiMarkdownLine(line: string, nameMaps?: PiNameMaps, options?: return `/skill:${normalizePiSkillReferenceName(skillName, nameMaps, options)}` } - if (commandName.startsWith("prompts:")) { - const promptName = commandName.slice("prompts:".length) + if (commandName.startsWith("prompt:") || commandName.startsWith("prompts:")) { + const isPluralPromptRef = commandName.startsWith("prompts:") + const promptName = isPluralPromptRef + ? commandName.slice("prompts:".length) + : commandName.slice("prompt:".length) const normalizedPrompt = normalizePiPromptReferenceName(promptName, nameMaps, options) if (normalizedPrompt === promptName && normalizedPrompt.includes(":")) { return match } - return `/${normalizedPrompt}` + return isPluralPromptRef ? `/${normalizedPrompt}` : `/prompt:${normalizedPrompt}` } const withoutPrefix = commandName.startsWith("prompts:") diff --git a/tests/files.test.ts b/tests/files.test.ts index 526b4bad8..9331405a0 100644 --- a/tests/files.test.ts +++ b/tests/files.test.ts @@ -10,6 +10,8 @@ import { removeManagedPathIfExists, restoreManagedPathSnapshot, writeFileAtomicIfChanged, + writeText, + writeTextAtomicIfChanged, } from "../src/utils/files" describe("managed file mutations", () => { @@ -86,6 +88,58 @@ describe("managed file mutations", () => { expect(stats.mode & 0o777).toBe(0o600) }) + test("updates file mode even when atomic binary content is unchanged", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-atomic-mode-only-update-")) + const targetPath = path.join(tempRoot, "script.sh") + + await writeFileAtomicIfChanged({ + filePath: targetPath, + content: Buffer.from("#!/bin/sh\necho hi\n"), + mode: 0o644, + }) + + await writeFileAtomicIfChanged({ + filePath: targetPath, + content: Buffer.from("#!/bin/sh\necho hi\n"), + 
mode: 0o755, + }) + + expect((await fs.stat(targetPath)).mode & 0o777).toBe(0o755) + }) + + test("updates file mode even when atomic text content is unchanged", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-atomic-text-mode-only-update-")) + const targetPath = path.join(tempRoot, "skill.md") + + await writeTextAtomicIfChanged({ + filePath: targetPath, + content: "Body\n", + mode: 0o644, + }) + + await writeTextAtomicIfChanged({ + filePath: targetPath, + content: "Body\n", + mode: 0o755, + }) + + expect((await fs.stat(targetPath)).mode & 0o777).toBe(0o755) + }) + + test("allows generic text writes through symlinked parent directories", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-write-text-symlink-parent-")) + const realRoot = path.join(tempRoot, "real-root") + const symlinkRoot = path.join(tempRoot, "symlink-root") + const targetPath = path.join(symlinkRoot, "nested", "note.txt") + + await fs.mkdir(realRoot, { recursive: true }) + await fs.symlink(realRoot, symlinkRoot) + + await writeText(targetPath, "hello\n") + + expect(await fs.readFile(path.join(realRoot, "nested", "note.txt"), "utf8")).toBe("hello\n") + }) + test("rejects snapshot restore through symlinked ancestor directories", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "files-restore-ancestor-symlink-")) const managedRoot = path.join(tempRoot, "managed") diff --git a/tests/pi-writer.test.ts b/tests/pi-writer.test.ts index a81ec56de..39acff8b9 100644 --- a/tests/pi-writer.test.ts +++ b/tests/pi-writer.test.ts @@ -1016,7 +1016,8 @@ Run these research agents: })).rejects.toThrow(/Refusing to write through symlink target|Refusing to restore through symlink target|ENOENT/) expect(await fs.readFile(protectedFile, "utf8")).toBe("protected\n") - expect((await fs.lstat(targetFile)).isSymbolicLink()).toBe(true) + expect((await fs.lstat(targetFile)).isSymbolicLink()).toBe(false) + expect(await fs.readFile(targetFile, 
"utf8")).toBe("original asset\n") }) test("removes stale generated-agent directories after normalization changes", async () => { @@ -1540,6 +1541,129 @@ Run these research agents: delete process.env.COMPOUND_ENGINEERING_HOME }) + test("restores the prior copied skill tree when a later AGENTS write fails after an incremental install update", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-incremental-skill-rollback-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "plan-review", content: "before" }], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [{ name: "extra.ts", content: "export const before = true" }], + }) + + const layout = resolvePiLayout(outputRoot, "install") + const targetSkillFile = path.join(layout.skillsDir, "docs-skill", "nested", "stable.txt") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "updated\n") + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === path.join(layout.extensionsDir, "extra.ts") && stage === "beforeRename") { + throw new Error("simulated extension failure") + } + }) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "plan-review", content: "before" }], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [{ name: "extra.ts", content: "export const after = true" }], + 
})).rejects.toThrow("simulated extension failure") + + expect(await fs.readFile(targetSkillFile, "utf8")).toBe("stable\n") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves executable modes for copied files during install materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-preserve-exec-mode-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + const scriptPath = path.join(sourceSkillDir, "scripts", "run.sh") + + await fs.mkdir(path.dirname(scriptPath), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(scriptPath, "#!/bin/sh\necho installed\n") + await fs.chmod(scriptPath, 0o755) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetStats = await fs.stat(path.join(resolvePiLayout(outputRoot, "install").skillsDir, "docs-skill", "scripts", "run.sh")) + expect(targetStats.mode & 0o777).toBe(0o755) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("preserves non-default mode for rewritten SKILL.md during install materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-skill-md-mode-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + const skillPath = path.join(sourceSkillDir, "SKILL.md") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(skillPath, "---\nname: docs_skill\n---\n\nBody\n") + await fs.chmod(skillPath, 0o755) + + 
await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs_skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + }) + + const targetStats = await fs.stat(path.join(resolvePiLayout(outputRoot, "install").skillsDir, "docs_skill", "SKILL.md")) + expect(targetStats.mode & 0o777).toBe(0o755) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + + test("rejects unresolved first-party structured subagent refs during install copied-skill materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-unresolved-structured-first-party-")) + const outputRoot = path.join(tempRoot, ".pi") + const sourceSkillDir = path.join(tempRoot, "source-skill") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile( + path.join(sourceSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: unresolved structured first-party ref", + "---", + "", + 'Run subagent with agent="claude-home:missing-agent" and task="feature_description".', + ].join("\n"), + ) + + await expect(writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [], + skillDirs: [{ name: "docs-skill", sourceDir: sourceSkillDir }], + generatedSkills: [], + extensions: [], + })).rejects.toThrow("Unsupported unresolved first-party qualified ref for Pi sync: claude-home:missing-agent") + }) + test("rejects symlinked AGENTS.md targets during Pi bundle writes", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-agents-symlink-")) const outputRoot = path.join(tempRoot, ".pi") @@ -1699,6 +1823,42 @@ Run these research agents: delete process.env.COMPOUND_ENGINEERING_HOME }) + test("does not take outer rollback snapshots for managed-state files after a successful install commit", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-managed-state-postcommit-snapshot-")) + const stateHome = 
path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + const outputRoot = path.join(tempRoot, ".pi") + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "old-plan", content: "Old prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const layout = resolvePiLayout(outputRoot, "install") + setManagedPathSnapshotHookForTests((targetPath) => { + if (targetPath === layout.managedManifestPath || targetPath === layout.verificationPath) { + throw new Error("managed state should not be outer-snapshotted after commit") + } + }) + + await writePiBundle(outputRoot, { + pluginName: "compound-engineering", + prompts: [{ name: "new-plan", content: "New prompt" }], + skillDirs: [], + generatedSkills: [], + extensions: [], + }) + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.install.artifacts.map((artifact) => artifact.emittedName)).toEqual(["new-plan"]) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + test("removes the canonical compat extension when install no longer owns it", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "pi-writer-install-compat-removal-")) const stateHome = path.join(tempRoot, "state-home") diff --git a/tests/sync-pi.test.ts b/tests/sync-pi.test.ts index 8be8e1975..e5f6981a6 100644 --- a/tests/sync-pi.test.ts +++ b/tests/sync-pi.test.ts @@ -411,6 +411,14 @@ describe("syncToPi", () => { skillPath: path.join(sourceSkillDir, "SKILL.md"), }, ], + commands: [ + { + name: "plan-review", + description: "forces later prompt write", + body: "before", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], mcpServers: {}, } @@ -1377,6 +1385,14 @@ describe("syncToPi", () => { skillPath: path.join(sourceSkillDir, "SKILL.md"), }, ], + commands: [ + { + name: "plan-review", + description: "forces later prompt write", + body: "before", + sourcePath: 
path.join(tempRoot, "commands", "plan-review.md"), + }, + ], mcpServers: {}, } @@ -1467,6 +1483,14 @@ describe("syncToPi", () => { skillPath: path.join(sourceSkillDir, "SKILL.md"), }, ], + commands: [ + { + name: "plan-review", + description: "forces later prompt write", + body: "before", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], mcpServers: {}, } @@ -3530,7 +3554,7 @@ describe("syncToPi", () => { warnSpy.mockRestore() }) - test("re-renders sibling skill refs against the final published alias set after unsupported siblings drop out", async () => { + test("omits qualified same-run skill refs when the sibling becomes unsupported-final", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-sibling-publication-atomicity-")) const docsSkillDir = path.join(tempRoot, "docs-skill") const badSkillDir = path.join(tempRoot, "bad-skill") @@ -3586,15 +3610,13 @@ describe("syncToPi", () => { }, tempRoot) await expect(fs.access(path.join(tempRoot, "skills", "bad-skill", "SKILL.md"))).rejects.toBeDefined() - const docsSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") + await expect(fs.access(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"))).rejects.toBeDefined() expect(await fs.readFile(path.join(tempRoot, "skills", "good-skill", "SKILL.md"), "utf8")).toContain("Body") - expect(docsSkill).not.toContain("/skill:bad-skill") - expect(docsSkill).toContain("/skill:claude-home:bad-skill") const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) expect(trust.state?.sync.nameMaps.skills["good-skill"]).toBe("good-skill") expect(trust.state?.sync.nameMaps.skills["bad-skill"]).toBeUndefined() - expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBe("docs-skill") + expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBeUndefined() expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill bad-skill")) 
warnSpy.mockRestore() @@ -3618,7 +3640,7 @@ describe("syncToPi", () => { "description: retry after sibling shrink", "---", "", - "- Task compound-engineering:review:bad(feature_description)", + "- Task bad(feature_description)", ].join("\n"), ) await fs.writeFile( @@ -3643,21 +3665,17 @@ describe("syncToPi", () => { mcpServers: {}, }, tempRoot) - const docsSkill = await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8") - expect(docsSkill).toContain('Run ce_subagent with agent="bad" and task="feature_description".') - await expect(fs.access(path.join(tempRoot, "skills", "bad", "SKILL.md"))).rejects.toBeDefined() - expect(await fs.readFile(path.join(tempRoot, "skills", "valid-skill", "SKILL.md"), "utf8")).toContain("Body") expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill bad")) - expect(warnSpy).not.toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill docs-skill")) const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) - expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBe("docs-skill") + expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBeUndefined() expect(trust.state?.sync.nameMaps.skills.bad).toBeUndefined() + expect(trust.state?.sync.nameMaps.skills["valid-skill"]).toBe("valid-skill") warnSpy.mockRestore() }) - test("retries a first-pass blocked prompt after a colliding sibling skill drops out", async () => { + test("does not publish dependent prompts when a same-run sibling becomes unsupported-final", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-retry-prompt-after-shrink-")) const blockingSkillDir = path.join(tempRoot, "blocking-skill") const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) @@ -3683,7 +3701,7 @@ describe("syncToPi", () => { { name: "plan-review", description: "Prompt retries after sibling shrink", - body: "- Task 
compound-engineering:review:bad(feature_description)", + body: "- Task bad(feature_description)", sourcePath: path.join(tempRoot, "commands", "plan-review.md"), }, { @@ -3696,14 +3714,13 @@ describe("syncToPi", () => { mcpServers: {}, }, tempRoot) - expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain('Run ce_subagent with agent="bad" and task="feature_description".') + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() expect(await fs.readFile(path.join(tempRoot, "prompts", "safe-review.md"), "utf8")).toContain("Body") await expect(fs.access(path.join(tempRoot, "skills", "bad", "SKILL.md"))).rejects.toBeDefined() expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync skill bad")) - expect(warnSpy).not.toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync command plan-review")) const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) - expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBe("plan-review") + expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBeUndefined() warnSpy.mockRestore() }) @@ -3762,6 +3779,400 @@ describe("syncToPi", () => { expect(syncedPrompt).toContain('Run ce_subagent with agent="ce-plan" and task="feature_description".') }) + test("keeps a prompt published when it depends on a same-run skill that publishes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-cross-type-prompt-skill-")) + const planSkillDir = path.join(tempRoot, "plan-skill") + + await fs.mkdir(planSkillDir, { recursive: true }) + await fs.writeFile(path.join(planSkillDir, "SKILL.md"), "---\nname: ce:plan\n---\n\nBody\n") + + await syncToPi({ + skills: [ + { + name: "ce:plan", + sourceDir: planSkillDir, + skillPath: path.join(planSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "depends on same-run skill", + body: "- 
Task claude-home:ce:plan(feature_description)", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8")).toContain('Run ce_subagent with agent="ce-plan" and task="feature_description".') + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBe("plan-review") + expect(trust.state?.sync.artifacts.some((artifact) => artifact.kind === "prompt" && artifact.emittedName === "plan-review")).toBe(true) + }) + + test("keeps a skill published when it depends on a same-run prompt that publishes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-cross-type-skill-prompt-")) + const docsSkillDir = path.join(tempRoot, "docs-skill") + + await fs.mkdir(docsSkillDir, { recursive: true }) + await fs.writeFile( + path.join(docsSkillDir, "SKILL.md"), + [ + "---", + "name: docs-skill", + "description: depends on same-run prompt", + "---", + "", + "- /prompt:claude-home:plan-review", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: docsSkillDir, + skillPath: path.join(docsSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "published sibling prompt", + body: "Body", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect(await fs.readFile(path.join(tempRoot, "skills", "docs-skill", "SKILL.md"), "utf8")).toContain("/prompt:plan-review") + + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.skills["docs-skill"]).toBe("docs-skill") + expect(trust.state?.sync.artifacts.some((artifact) => artifact.kind === "synced-skill" && artifact.emittedName === "docs-skill")).toBe(true) + }) + + test("demotes punctuated same-run skill 
refs when the sibling skill drops out", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-punctuated-skill-dependency-")) + const badSkillDir = path.join(tempRoot, "bad-skill") + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await fs.mkdir(badSkillDir, { recursive: true }) + await fs.writeFile( + path.join(badSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: bad sibling", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "ce:plan", + sourceDir: badSkillDir, + skillPath: path.join(badSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "punctuated dependency", + body: "See /skill:claude-home:ce:plan, then continue.", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBeUndefined() + warnSpy.mockRestore() + }) + + test("demotes unqualified same-run Task refs when the sibling skill drops out", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unqualified-task-dependency-")) + const badSkillDir = path.join(tempRoot, "bad-skill") + + await fs.mkdir(badSkillDir, { recursive: true }) + await fs.writeFile( + path.join(badSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: bad sibling", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "ce:plan", + sourceDir: badSkillDir, + skillPath: path.join(badSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "unqualified same-run dependency", + body: "- Task 
ce:plan(feature_description)", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBeUndefined() + }) + + test("demotes structured same-run subagent refs when the sibling skill drops out", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-structured-agent-dependency-")) + const badSkillDir = path.join(tempRoot, "bad-skill") + + await fs.mkdir(badSkillDir, { recursive: true }) + await fs.writeFile( + path.join(badSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: bad sibling", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + skills: [ + { + name: "ce:plan", + sourceDir: badSkillDir, + skillPath: path.join(badSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "structured same-run dependency", + body: 'Run subagent with agent="claude-home:ce:plan" and task="feature_description".', + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + }) + + test("ignores same-run refs that only appear inside fenced code blocks", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-code-block-dependency-")) + const badSkillDir = path.join(tempRoot, "bad-skill") + + await fs.mkdir(badSkillDir, { recursive: true }) + await fs.writeFile( + path.join(badSkillDir, "SKILL.md"), + [ + "---", + "name: ce:plan", + "description: bad sibling", + "---", + "", + "- Task unknown-plugin:review:bad(feature_description)", + ].join("\n"), + ) + + await syncToPi({ + 
skills: [ + { + name: "ce:plan", + sourceDir: badSkillDir, + skillPath: path.join(badSkillDir, "SKILL.md"), + }, + ], + commands: [ + { + name: "plan-review", + description: "code block example only", + body: [ + "Example:", + "```md", + "/skill:claude-home:ce:plan", + "```", + ].join("\n"), + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const prompt = await fs.readFile(path.join(tempRoot, "prompts", "plan-review.md"), "utf8") + expect(prompt).toContain("/skill:claude-home:ce:plan") + const trust = await loadPiManagedStateWithTrust(resolvePiLayout(tempRoot, "sync")) + expect(trust.state?.sync.nameMaps.prompts["plan-review"]).toBe("plan-review") + }) + + test("skips unresolved first-party structured subagent refs instead of normalizing to leaf agents", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-unresolved-structured-first-party-")) + const warnSpy = spyOn(console, "warn").mockImplementation(() => {}) + + await syncToPi({ + skills: [], + commands: [ + { + name: "plan-review", + description: "structured first-party missing agent", + body: 'Run subagent with agent="claude-home:missing-agent" and task="feature_description".', + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + await expect(fs.access(path.join(tempRoot, "prompts", "plan-review.md"))).rejects.toBeDefined() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining("Skipping unsupported Pi sync command plan-review")) + warnSpy.mockRestore() + }) + + test("preserves executable modes for copied files during sync materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-preserve-exec-mode-")) + const sourceSkillDir = path.join(tempRoot, "source-skill") + const scriptPath = path.join(sourceSkillDir, "scripts", "run.sh") + + await fs.mkdir(path.dirname(scriptPath), { recursive: true }) + await 
fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(scriptPath, "#!/bin/sh\necho synced\n") + await fs.chmod(scriptPath, 0o755) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const targetStats = await fs.stat(path.join(tempRoot, "skills", "docs-skill", "scripts", "run.sh")) + expect(targetStats.mode & 0o777).toBe(0o755) + }) + + test("updates copied file mode when the source mode changes without content changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-mode-only-update-")) + const sourceSkillDir = path.join(tempRoot, "source-skill") + const scriptPath = path.join(sourceSkillDir, "scripts", "run.sh") + const targetPath = path.join(tempRoot, "skills", "docs-skill", "scripts", "run.sh") + + await fs.mkdir(path.dirname(scriptPath), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(scriptPath, "#!/bin/sh\necho synced\n") + await fs.chmod(scriptPath, 0o644) + + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect((await fs.stat(targetPath)).mode & 0o777).toBe(0o644) + + await fs.chmod(scriptPath, 0o755) + await syncToPi({ + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + expect((await fs.stat(targetPath)).mode & 0o777).toBe(0o755) + }) + + test("preserves non-default mode for rewritten SKILL.md during sync materialization", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-md-mode-")) + const sourceSkillDir = path.join(tempRoot, "source-skill") + const 
skillPath = path.join(sourceSkillDir, "SKILL.md") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(skillPath, "---\nname: docs_skill\n---\n\nBody\n") + await fs.chmod(skillPath, 0o755) + + await syncToPi({ + skills: [ + { + name: "docs_skill", + sourceDir: sourceSkillDir, + skillPath, + }, + ], + mcpServers: {}, + }, tempRoot) + + const targetStats = await fs.stat(path.join(tempRoot, "skills", "docs-skill", "SKILL.md")) + expect(targetStats.mode & 0o777).toBe(0o755) + }) + + test("updates rewritten SKILL.md mode when the source mode changes without content changes", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-md-mode-only-update-")) + const sourceSkillDir = path.join(tempRoot, "source-skill") + const skillPath = path.join(sourceSkillDir, "SKILL.md") + const targetPath = path.join(tempRoot, "skills", "docs-skill", "SKILL.md") + + await fs.mkdir(sourceSkillDir, { recursive: true }) + await fs.writeFile(skillPath, "---\nname: docs_skill\n---\n\nBody\n") + await fs.chmod(skillPath, 0o644) + + await syncToPi({ + skills: [ + { + name: "docs_skill", + sourceDir: sourceSkillDir, + skillPath, + }, + ], + mcpServers: {}, + }, tempRoot) + + expect((await fs.stat(targetPath)).mode & 0o777).toBe(0o644) + + await fs.chmod(skillPath, 0o755) + await syncToPi({ + skills: [ + { + name: "docs_skill", + sourceDir: sourceSkillDir, + skillPath, + }, + ], + mcpServers: {}, + }, tempRoot) + + expect((await fs.stat(targetPath)).mode & 0o777).toBe(0o755) + }) + test("narrows second-pass sync work to retryable artifacts only", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-narrow-rerun-")) const blockingSkillDir = path.join(tempRoot, "blocking-skill") @@ -4203,6 +4614,51 @@ describe("syncToPi", () => { } }) + test("does not take outer rollback snapshots for managed-state files after a successful sync commit", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), 
"sync-pi-managed-state-postcommit-snapshot-")) + const stateHome = path.join(tempRoot, "state-home") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await syncToPi({ + skills: [], + commands: [ + { + name: "old-plan", + description: "old", + body: "Old body", + sourcePath: path.join(tempRoot, "commands", "old-plan.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const layout = resolvePiLayout(tempRoot, "sync") + setManagedPathSnapshotHookForTests((targetPath) => { + if (targetPath === layout.managedManifestPath || targetPath === layout.verificationPath) { + throw new Error("managed state should not be outer-snapshotted after commit") + } + }) + + await syncToPi({ + skills: [], + commands: [ + { + name: "new-plan", + description: "new", + body: "New body", + sourcePath: path.join(tempRoot, "commands", "new-plan.md"), + }, + ], + mcpServers: {}, + }, tempRoot) + + const trust = await loadPiManagedStateWithTrust(layout) + expect(trust.status).toBe("verified") + expect(trust.state?.sync.artifacts.map((artifact) => artifact.emittedName)).toEqual(["new-plan"]) + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + test("removes renamed synced prompts after a later verified rerun from a legacy prompt filename", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-legacy-prompt-rename-")) const stateHome = path.join(tempRoot, "state-home") @@ -5113,6 +5569,14 @@ describe("syncToPi", () => { skillPath: path.join(sourceSkillDir, "SKILL.md"), }, ], + commands: [ + { + name: "plan-review", + description: "forces later prompt write", + body: "before", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], mcpServers: {}, } @@ -5145,6 +5609,14 @@ describe("syncToPi", () => { skillPath: path.join(sourceSkillDir, "SKILL.md"), }, ], + commands: [ + { + name: "plan-review", + description: "forces later prompt write", + body: "before", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], mcpServers: {}, 
} @@ -5163,7 +5635,7 @@ describe("syncToPi", () => { delete process.env.COMPOUND_ENGINEERING_HOME }) - test("avoids whole-directory publication snapshots for incremental synced skill updates", async () => { + test("snapshots incremental synced skill directories before later outer transaction work", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-incremental-skill-snapshot-")) const stateHome = path.join(tempRoot, "state-home") const sourceSkillDir = path.join(tempRoot, "claude-skill") @@ -5196,12 +5668,62 @@ describe("syncToPi", () => { await syncToPi(config, tempRoot) - expect(snapshottedPaths).not.toContain(targetSkillDir) + expect(snapshottedPaths).toContain(targetSkillDir) expect(await fs.readFile(path.join(targetSkillDir, "nested", "stable.txt"), "utf8")).toBe("updated\n") delete process.env.COMPOUND_ENGINEERING_HOME }) + test("restores the prior synced skill tree when a later prompt write fails after an incremental skill update", async () => { + const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-incremental-skill-rollback-")) + const stateHome = path.join(tempRoot, "state-home") + const sourceSkillDir = path.join(tempRoot, "claude-skill") + process.env.COMPOUND_ENGINEERING_HOME = stateHome + + await fs.mkdir(path.join(sourceSkillDir, "nested"), { recursive: true }) + await fs.writeFile(path.join(sourceSkillDir, "SKILL.md"), "---\nname: docs-skill\n---\n\nBody\n") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "stable\n") + + const config: ClaudeHomeConfig = { + skills: [ + { + name: "docs-skill", + sourceDir: sourceSkillDir, + skillPath: path.join(sourceSkillDir, "SKILL.md"), + }, + ], + mcpServers: {}, + } + + await syncToPi(config, tempRoot) + + const layout = resolvePiLayout(tempRoot, "sync") + const targetSkillFile = path.join(layout.skillsDir, "docs-skill", "nested", "stable.txt") + await fs.writeFile(path.join(sourceSkillDir, "nested", "stable.txt"), "updated\n") + const promptPath
= path.join(layout.promptsDir, "plan-review.md") + const failingConfig: ClaudeHomeConfig = { + ...config, + commands: [ + { + name: "plan-review", + description: "forces later prompt write", + body: "after", + sourcePath: path.join(tempRoot, "commands", "plan-review.md"), + }, + ], + } + setAtomicWriteFailureHookForTests((filePath, stage) => { + if (filePath === promptPath && stage === "beforeRename") { + throw new Error("simulated prompt failure") + } + }) + + await expect(syncToPi(failingConfig, tempRoot)).rejects.toThrow("simulated prompt failure") + expect(await fs.readFile(targetSkillFile, "utf8")).toBe("stable\n") + + delete process.env.COMPOUND_ENGINEERING_HOME + }) + test("does not perform full deep compare for unchanged synced skill directories on stable reruns", async () => { const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "sync-pi-skill-fast-path-")) const stateHome = path.join(tempRoot, "state-home")