# =============================================
# DeepAudit Backend 配置文件
# =============================================
# 复制此文件为 .env 并填入实际配置
# 详细说明请参阅 docs/CONFIGURATION.md

# =============================================
# 数据库配置
# =============================================
# PostgreSQL 数据库连接配置
# Docker Compose 部署时使用 db 作为服务器地址
POSTGRES_SERVER=localhost
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=deepaudit

# 完整数据库连接字符串(可选,会覆盖上述配置)
# DATABASE_URL=postgresql+asyncpg://postgres:postgres@localhost/deepaudit

# =============================================
# 安全配置
# =============================================
# JWT 签名密钥 - 生产环境必须修改为随机字符串!
# 建议使用: openssl rand -hex 32
SECRET_KEY=your-super-secret-key-change-this-in-production

# JWT 加密算法
ALGORITHM=HS256

# Token 过期时间(分钟),默认 8 天
ACCESS_TOKEN_EXPIRE_MINUTES=11520

# =============================================
# LLM 通用配置
# =============================================
# 支持的 provider:
# - LiteLLM 适配器: openai, gemini, claude, qwen, deepseek, zhipu, moonshot, ollama
# - 原生适配器: baidu, minimax, doubao
LLM_PROVIDER=openai

# API 密钥
LLM_API_KEY=sk-your-api-key

# 模型名称(留空使用 provider 默认模型)
# OpenAI: gpt-4o-mini, gpt-4o, gpt-3.5-turbo
# Gemini: gemini-2.0-flash, gemini-1.5-pro
# Claude: claude-3-5-sonnet-20241022, claude-3-haiku-20240307
# Qwen: qwen-turbo, qwen-plus, qwen-max
# DeepSeek: deepseek-chat, deepseek-coder
# Zhipu: glm-4-flash, glm-4
# Moonshot: moonshot-v1-8k, moonshot-v1-32k
# Ollama: llama3, codellama, qwen2.5, deepseek-coder
LLM_MODEL=

# 自定义 API 端点(API 中转站)
# 示例: https://your-proxy.com/v1
LLM_BASE_URL=

# 请求超时时间(秒)
LLM_TIMEOUT=150

# 生成温度(0-1,越低越确定性)
LLM_TEMPERATURE=0.1

# 最大生成 Token 数
LLM_MAX_TOKENS=4096

# =============================================
# 各平台独立配置(可选)
# =============================================
# 如果需要同时配置多个平台,可以单独设置
# 运行时可通过 /admin 页面切换

# OpenAI
# OPENAI_API_KEY=sk-xxx
# OPENAI_BASE_URL=https://api.openai.com/v1

# Google Gemini
# GEMINI_API_KEY=xxx

# Anthropic Claude
# CLAUDE_API_KEY=sk-ant-xxx

# 阿里云通义千问
# QWEN_API_KEY=sk-xxx

# DeepSeek
# DEEPSEEK_API_KEY=sk-xxx

# 智谱 AI
# ZHIPU_API_KEY=xxx

# 月之暗面 Kimi
# MOONSHOT_API_KEY=sk-xxx

# 百度文心一言(格式: api_key:secret_key)
# BAIDU_API_KEY=your_api_key:your_secret_key

# MiniMax
# MINIMAX_API_KEY=xxx

# 字节豆包
# DOUBAO_API_KEY=xxx

# Ollama 本地模型
# OLLAMA_BASE_URL=http://localhost:11434/v1

# =============================================
# Git 仓库配置
# =============================================
# GitHub Personal Access Token
# 获取地址: https://github.com/settings/tokens
# 权限要求: repo (私有仓库) 或 public_repo (公开仓库)
GITHUB_TOKEN=

# GitLab Personal Access Token
# 获取地址: https://gitlab.com/-/profile/personal_access_tokens
# 权限要求: read_repository
GITLAB_TOKEN=

# =============================================
# 扫描配置
# =============================================
# 单次扫描最大文件数
MAX_ANALYZE_FILES=50

# 单文件最大大小(字节),默认 200KB
MAX_FILE_SIZE_BYTES=204800

# LLM 并发请求数(注意 API 限流)
LLM_CONCURRENCY=3

# LLM 请求间隔(毫秒),避免触发限流
LLM_GAP_MS=2000

# =============================================
# 存储配置
# =============================================
# ZIP 文件存储目录
ZIP_STORAGE_PATH=./uploads/zip_files

# =============================================
# 输出配置
# =============================================
# 分析结果输出语言
# zh-CN: 中文
# en-US: 英文
OUTPUT_LANGUAGE=zh-CN