# ========================================
# XCodeReviewer environment variable example
# ========================================
# Copy this file to .env and fill in your own values

# ==================== General LLM Configuration ====================
# Select the LLM provider to use (gemini|openai|claude|qwen|deepseek|zhipu|moonshot|baidu|minimax|doubao|ollama)
VITE_LLM_PROVIDER=gemini
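# For example, to run against a local Ollama model instead of the hosted
# Gemini API, change the line above to:
# VITE_LLM_PROVIDER=ollama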

# Generic LLM settings (optional; if set, these override the provider-specific settings below)
# VITE_LLM_API_KEY=your_api_key_here
# VITE_LLM_MODEL=your_model_name
# VITE_LLM_BASE_URL=https://custom-api-endpoint.com
# VITE_LLM_TIMEOUT=150000
# VITE_LLM_TEMPERATURE=0.2
# VITE_LLM_MAX_TOKENS=4096
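# Example (illustrative placeholder values): with VITE_LLM_PROVIDER=gemini
# selected above, uncommenting the lines below would send requests to this
# custom endpoint and model instead of the Gemini defaults:
# VITE_LLM_MODEL=my-model-name
# VITE_LLM_BASE_URL=https://my-gateway.example.com/v1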

# ==================== Google Gemini Configuration ====================
# Get an API key: https://makersuite.google.com/app/apikey
# VITE_GEMINI_API_KEY=your_gemini_api_key_here
# VITE_GEMINI_MODEL=gemini-2.5-flash
# VITE_GEMINI_TIMEOUT_MS=150000

# ==================== OpenAI Configuration ====================
# Get an API key: https://platform.openai.com/api-keys
# VITE_OPENAI_API_KEY=your_openai_api_key_here
# VITE_OPENAI_MODEL=gpt-4o-mini
# VITE_OPENAI_BASE_URL=https://api.openai.com/v1

# ==================== Anthropic Claude Configuration ====================
# Get an API key: https://console.anthropic.com/
# VITE_CLAUDE_API_KEY=your_claude_api_key_here
# VITE_CLAUDE_MODEL=claude-3-5-sonnet-20241022

# ==================== Alibaba Cloud Qwen (Tongyi Qianwen) Configuration ====================
# Get an API key: https://dashscope.console.aliyun.com/
# VITE_QWEN_API_KEY=your_qwen_api_key_here
# VITE_QWEN_MODEL=qwen-turbo

# ==================== DeepSeek Configuration ====================
# Get an API key: https://platform.deepseek.com/
# VITE_DEEPSEEK_API_KEY=your_deepseek_api_key_here
# VITE_DEEPSEEK_MODEL=deepseek-chat

# ==================== Zhipu AI (GLM) Configuration ====================
# Get an API key: https://open.bigmodel.cn/
# VITE_ZHIPU_API_KEY=your_zhipu_api_key_here
# VITE_ZHIPU_MODEL=glm-4-flash

# ==================== Moonshot AI (Kimi) Configuration ====================
# Get an API key: https://platform.moonshot.cn/
# VITE_MOONSHOT_API_KEY=your_moonshot_api_key_here
# VITE_MOONSHOT_MODEL=moonshot-v1-8k

# ==================== Baidu ERNIE (Wenxin Yiyan) Configuration ====================
# Get an API key: https://console.bce.baidu.com/qianfan/
# Note: the Baidu API key must be provided in the format "API_KEY:SECRET_KEY"
# VITE_BAIDU_API_KEY=your_api_key:your_secret_key
# VITE_BAIDU_MODEL=ERNIE-3.5-8K

# ==================== MiniMax Configuration ====================
# Get an API key: https://www.minimaxi.com/
# VITE_MINIMAX_API_KEY=your_minimax_api_key_here
# VITE_MINIMAX_MODEL=abab6.5-chat

# ==================== ByteDance Doubao Configuration ====================
# Get an API key: https://console.volcengine.com/ark
# Note: Doubao is addressed by endpoint ID, so create an inference endpoint in the Ark console first
# VITE_DOUBAO_API_KEY=your_doubao_api_key_here
# VITE_DOUBAO_MODEL=doubao-pro-32k
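# Illustrative example (assumption, not confirmed by this project: the Ark API
# commonly takes the inference endpoint ID in place of a model name):
# VITE_DOUBAO_MODEL=ep-2024xxxxxxxxxx-xxxxx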

# ==================== Ollama Local Model Configuration ====================
# Ollama lets you run open-source models locally; no API key is required
# Install: https://ollama.com/
# Quick start:
# 1. Install Ollama: curl -fsSL https://ollama.com/install.sh | sh
# 2. Pull a model: ollama pull llama3
# 3. Set the variables below and start the app
# VITE_OLLAMA_API_KEY=ollama # no real key is needed for local use; any value works
# VITE_OLLAMA_MODEL=llama3
# VITE_OLLAMA_BASE_URL=http://localhost:11434/v1
#
# Recommended models:
# - llama3 (strong all-round model, good for general tasks)
# - codellama (code-focused, well suited to code review)
# - qwen2.5:7b (good Chinese-language support)
# - deepseek-coder (strong code understanding)
# - phi3:mini (lightweight and fast)
#
# More models: https://ollama.com/library
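#
# To check that the local Ollama server is reachable before starting the app,
# you can query its OpenAI-compatible endpoint (assumes the default port 11434):
# curl http://localhost:11434/v1/models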

# ==================== Supabase Database Configuration (optional) ====================
# If left unset, the app runs in demo mode and data is not persisted
# Get your project settings: https://supabase.com/
# VITE_SUPABASE_URL=https://your-project.supabase.co
# VITE_SUPABASE_ANON_KEY=your-anon-key-here
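# Both values can be copied from the Supabase dashboard under
# Project Settings -> API (Project URL and the anon/public key).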

# ==================== GitHub Integration Configuration (optional) ====================
# Used for the repository analysis feature
# Create a token: https://github.com/settings/tokens
# VITE_GITHUB_TOKEN=your_github_token_here
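# Assumption about required permissions: a classic token with the "repo" scope
# (or a fine-grained token with read-only repository access) should cover
# private repositories; public repositories can usually be read without a
# token, but unauthenticated requests hit much lower GitHub rate limits.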

# ==================== Application Configuration ====================
VITE_APP_ID=xcodereviewer

# ==================== Code Analysis Configuration ====================
VITE_MAX_ANALYZE_FILES=40
VITE_LLM_CONCURRENCY=2
VITE_LLM_GAP_MS=500
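# Assumed meaning of the defaults above: analyze at most 40 files per run, keep
# at most 2 LLM requests in flight at a time, and wait 500 ms between request
# starts to stay within provider rate limits.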