-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpyproject.toml
More file actions
110 lines (105 loc) · 3.97 KB
/
pyproject.toml
File metadata and controls
110 lines (105 loc) · 3.97 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
# PEP 517 build configuration — hatchling builds the sdist/wheel.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# Core package metadata (PEP 621).
[project]
name = "helix-context"
version = "0.4.0b1"
description = "Coordinate index layer for LLM context — Helix weighs, doesn't retrieve"
readme = "README.md"
requires-python = ">=3.11"
# SPDX license expression (PEP 639).
# NOTE(review): PEP 639 deprecates the "License :: ..." classifier below when
# an SPDX `license` string is used; recent build backends may warn or reject
# the combination — consider dropping the classifier.
license = "Apache-2.0"
authors = [
    { name = "Michael Bachaud", email = "swiftwing21@users.noreply.github.com" },
]
keywords = ["llm", "context", "compression", "ollama", "genome", "rag"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
]
# Core runtime requirements (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "fastapi>=0.110",
    "httpx>=0.27",
    "pydantic>=2.6",
    "uvicorn>=0.29",
]

# Project links shown on the PyPI sidebar.
[project.urls]
Homepage = "https://github.com/SwiftWing21/helix-context"
Repository = "https://github.com/SwiftWing21/helix-context"
Issues = "https://github.com/SwiftWing21/helix-context/issues"

[project.optional-dependencies]
# Every extra below is gated on a feature helix exposes at runtime. Core
# (`pip install helix-context`) gives a working HTTP server + /context
# + /context/packet with genome-local retrieval. Add extras as you turn
# on specific features.
accel = ["orjson>=3.9"]
embeddings = ["numpy", "sentence-transformers"]
cpu = ["spacy>=3.7"]

# MCP SDK — required for `python -m helix_context.mcp_server` (Claude Code,
# Cursor, Claude Desktop integration). Core install does NOT pull this;
# add the extra if you're an MCP host operator.
mcp = ["mcp>=1.0"]

# NLI / DeBERTa cross-encoder backends — only needed when
# `[ribosome] backend = "deberta"` or relation-graph NLI is enabled.
# `embeddings` already transitively pulls torch via sentence-transformers,
# so this is just the standalone path.
nli = ["torch>=2.0", "transformers>=4.30"]

# OpenTelemetry observability — traces + metrics + logs via OTLP gRPC.
# Required only when `HELIX_OTEL_ENABLED=1`; core helix runs without them.
otel = [
    "opentelemetry-sdk>=1.27",
    "opentelemetry-exporter-otlp-proto-grpc>=1.27",
    "opentelemetry-instrumentation-fastapi>=0.48b0",
]

# Launcher UI variants; `launcher` is the browser-based baseline.
launcher = ["jinja2>=3.1", "psutil>=5.9"]
launcher-native = ["jinja2>=3.1", "psutil>=5.9", "pywebview>=5.0"]

# pystray is LGPL-3. It is a runtime-only optional dep that the user
# installs explicitly; the helix-context wheel itself does not bundle
# pystray, so the core package stays Apache-2.0-clean.
launcher-tray = ["jinja2>=3.1", "psutil>=5.9", "pystray>=0.19", "Pillow>=10"]

# Tree-sitter grammars for AST-aware gene extraction.
ast = [
    "tree-sitter>=0.23",
    "tree-sitter-python>=0.23",
    "tree-sitter-rust>=0.23",
    "tree-sitter-javascript>=0.23",
    "tree-sitter-typescript>=0.23",
]

scorerift = ["scorerift"]

# CPU-resident semantic compression for gene content. Headroom by Tejas Chopra
# (https://github.com/chopratejas/headroom, Apache-2.0). See NOTICE.
codec = ["headroom-ai[proxy,code]>=0.5.21"]

dev = ["pytest", "pytest-asyncio"]

# `all` covers most of the feature surface. Deliberately excluded:
# contributor-only extras (dev), the LGPL launcher-tray variant, and the
# standalone `scorerift` and `launcher-native` (pywebview) add-ons.
# Dep bulk dominated by sentence-transformers + torch + spacy +
# tree-sitter + headroom-ai.
all = [
    "orjson>=3.9",
    "numpy",
    "sentence-transformers",
    "spacy>=3.7",
    "mcp>=1.0",
    "torch>=2.0",
    "transformers>=4.30",
    "opentelemetry-sdk>=1.27",
    "opentelemetry-exporter-otlp-proto-grpc>=1.27",
    "opentelemetry-instrumentation-fastapi>=0.48b0",
    "jinja2>=3.1",
    "psutil>=5.9",
    "tree-sitter>=0.23",
    "tree-sitter-python>=0.23",
    "tree-sitter-rust>=0.23",
    "tree-sitter-javascript>=0.23",
    "tree-sitter-typescript>=0.23",
    "headroom-ai[proxy,code]>=0.5.21",
]

# Console entry points installed onto PATH.
[project.scripts]
helix = "helix_context.server:main"
helix-launcher = "helix_context.launcher.app:main"
# NOTE(review): targets a top-level `helix_status` module, unlike the
# `helix_context.*` entry points above — confirm the module ships in the wheel.
helix-status = "helix_status:main"

# Pytest configuration (tool tables follow the [project.*] tables).
[tool.pytest.ini_options]
testpaths = ["tests"]
markers = ["live: requires Ollama running with at least one model"]