-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtechpack.yaml
More file actions
140 lines (126 loc) · 5.03 KB
/
techpack.yaml
File metadata and controls
140 lines (126 loc) · 5.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
---
# techpack.yaml — "memory" techpack manifest.
# NOTE(review): indentation reconstructed from a flattened copy; nesting follows
# the obvious schema (components list, nested mcp/hook/skill/doctorChecks maps).
# Verify against the consumer's schema before shipping.
schemaVersion: 1
identifier: memory
displayName: "Memory"
description: Persistent memory and knowledge management for Claude Code
author: Bruno Guidolim
minMCSVersion: "2026.3.22"

# ---------------------------------------------------------------------------
# Components
# ---------------------------------------------------------------------------
components:
  # ── Dependencies ────────────────────────────────────────────────────────
  - id: node
    displayName: Node.js
    description: JavaScript runtime (for npx-based MCP servers)
    brew: node

  - id: gh
    displayName: GitHub CLI
    description: GitHub CLI for PR operations
    brew: gh

  - id: jq
    displayName: jq
    description: Lightweight JSON processor
    brew: jq

  - id: ollama
    displayName: Ollama
    description: Local LLM runtime
    brew: ollama

  - id: ollama-service
    displayName: Ollama service
    description: Start Ollama as background service
    type: configuration
    dependencies: [ollama]
    shell: "brew services start ollama"
    doctorChecks:
      - type: commandExists
        name: "Ollama service running"
        section: AI Models
        command: curl
        args: ["-sf", "http://localhost:11434/api/tags"]
        fixCommand: "brew services start ollama"

  - id: ollama-nomic-embed
    description: Pull nomic-embed-text model for docs-mcp-server embeddings
    type: configuration
    dependencies: [ollama]
    shell: "ollama pull nomic-embed-text"
    doctorChecks:
      - type: commandExists
        name: "nomic-embed-text model"
        section: AI Models
        command: ollama
        args: ["show", "nomic-embed-text"]

  # ── MCP Servers ─────────────────────────────────────────────────────────
  - id: docs-mcp-server
    description: Semantic search over memories using local Ollama embeddings
    isRequired: true
    dependencies: [node, ollama]
    mcp:
      command: npx
      args:
        - "-y"
        - "@arabold/docs-mcp-server@v2.0.4"
        - "--read-only"
        - "--telemetry=false"
      env:
        # Ollama's OpenAI-compatible endpoint; the API key is a placeholder.
        OPENAI_API_KEY: "ollama"
        OPENAI_API_BASE: "http://localhost:11434/v1"
        DOCS_MCP_EMBEDDING_MODEL: "openai:nomic-embed-text"

  # ── Skills ──────────────────────────────────────────────────────────────
  - id: skill-continuous-learning
    displayName: continuous-learning skill
    description: Extracts learnings and decisions from sessions into memory
    isRequired: true
    skill:
      source: skills/continuous-learning
      destination: continuous-learning

  # ── Hooks ───────────────────────────────────────────────────────────────
  - id: hook-continuous-learning
    displayName: Continuous learning activator
    description: Reminds to evaluate learnings on each prompt
    isRequired: true
    hookEvent: UserPromptSubmit
    hook:
      source: hooks/continuous-learning-activator.sh
      destination: continuous-learning-activator.sh

  - id: hook-sync-memories
    displayName: Sync memories hook
    description: Checks Ollama health and syncs docs-mcp-server library on session start
    dependencies: [ollama, docs-mcp-server, jq]
    hookEvent: SessionStart
    hookAsync: true
    hookTimeout: 120
    hookStatusMessage: "Indexing memories..."
    hook:
      source: hooks/sync-memories.sh
      destination: sync-memories.sh

  # Same script as hook-sync-memories, re-triggered per prompt for mid-session changes.
  - id: hook-reindex-memories
    displayName: Reindex memories hook
    description: Reindexes docs-mcp-server library when memories have changed mid-session
    dependencies: [ollama, docs-mcp-server, jq]
    hookEvent: UserPromptSubmit
    hookAsync: true
    hookTimeout: 120
    hookStatusMessage: "Reindexing memories..."
    hook:
      source: hooks/sync-memories.sh
      destination: sync-memories.sh

  # ── Configuration ───────────────────────────────────────────────────────
  - id: settings
    displayName: Settings
    description: Disables built-in auto-memory in favor of continuous learning system
    settingsFile: config/settings.json

  - id: gitignore
    displayName: Global gitignore
    description: Ignores memory files from version control
    gitignore:
      - "*.local.*"
      - ".claude/memories"
      - ".claude/.memories-last-indexed"

# ---------------------------------------------------------------------------
# Templates — CLAUDE.local.md sections
# ---------------------------------------------------------------------------
templates:
  - sectionIdentifier: continuous-learning
    placeholders:
      - __PROJECT_DIR_NAME__
    contentFile: templates/continuous-learning.md