# JoyClaw 配置文件示例
# 复制此文件为 .env 并填写您的配置
# ============================================
# LLM 提供商配置
# ============================================
# LLM 提供商选择 (openai, deepseek, zhipu, moonshot, ollama)
LLM_PROVIDER=deepseek
# LLM API Key (必填,ollama 除外)
LLM_API_KEY=your_api_key_here
# LLM Base URL (可选,不填则使用预设)
# LLM_BASE_URL=https://api.deepseek.com/v1
# LLM Model (可选,不填则使用预设默认模型)
# LLM_MODEL=deepseek-chat
# ============================================
# 各提供商配置示例
# ============================================
# OpenAI
# LLM_PROVIDER=openai
# LLM_API_KEY=sk-xxxxxxxxxxxxx
# LLM_MODEL=gpt-4o-mini
# DeepSeek (默认)
# LLM_PROVIDER=deepseek
# LLM_API_KEY=sk-xxxxxxxxxxxxx
# LLM_MODEL=deepseek-chat
# 智谱 AI (GLM)
# LLM_PROVIDER=zhipu
# LLM_API_KEY=xxxxxxxxxxxxx
# LLM_MODEL=glm-4
# 月之暗面 (Moonshot)
# LLM_PROVIDER=moonshot
# LLM_API_KEY=sk-xxxxxxxxxxxxx
# LLM_MODEL=moonshot-v1-8k
# Ollama (本地部署)
# LLM_PROVIDER=ollama
# LLM_MODEL=llama2
# LLM_BASE_URL=http://localhost:11434/v1
# ============================================
# 安全配置
# ============================================
# 允许执行的命令列表 (逗号分隔)
ALLOWED_COMMANDS=ls,cat,echo,pwd,whoami,date
# 最大文件大小 (字节)
MAX_FILE_SIZE=1048576