-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.py
More file actions
33 lines (26 loc) · 1.11 KB
/
config.py
File metadata and controls
33 lines (26 loc) · 1.11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
# Configuration module: reads all settings from environment variables.
import os
from dotenv import load_dotenv
# Populate os.environ from a local .env file (if present) before any of the
# os.getenv lookups below run; real environment variables take precedence.
load_dotenv()
def _get_int_env(var_name: str, default: int, min_value: int = 1) -> int:
"""Parse integer env var safely with fallback to default.
Invalid or out-of-range values are ignored to keep startup stable.
"""
raw_value = os.getenv(var_name)
if raw_value is None:
return default
try:
parsed_value = int(raw_value)
if parsed_value < min_value:
return default
return parsed_value
except (TypeError, ValueError):
return default
# --- Credentials (no defaults; each is None when the variable is unset) ---
# NOTE(review): BOT_TOKEN is presumably a chat-bot API token — confirm against the caller.
TOKEN = os.getenv("BOT_TOKEN")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

# --- Hosted model identifiers (overridable via environment) ---
OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o-mini")
GROQ_MODEL = os.getenv("GROQ_MODEL", "llama-3.1-8b-instant")

# --- Local LLM settings (path points at a GGUF model file) ---
LOCAL_LLM_MODEL_PATH = os.getenv("LOCAL_LLM_MODEL_PATH", "models/google-gemma-4b-it-Q4_K_M.gguf")
# Integer knobs parsed defensively: invalid/out-of-range env values fall back to the defaults.
LOCAL_LLM_CONTEXT = _get_int_env("LOCAL_LLM_CONTEXT", default=4096, min_value=512)  # presumably context window in tokens — TODO confirm
LOCAL_LLM_THREADS = _get_int_env("LOCAL_LLM_THREADS", default=4, min_value=1)  # presumably CPU thread count — TODO confirm
LOCAL_LLM_MAX_TOKENS = _get_int_env("LOCAL_LLM_MAX_TOKENS", default=512, min_value=32)  # presumably max generated tokens — TODO confirm