# =============================================
# DeepAudit Backend configuration file
# =============================================
# Copy this file to .env and fill in your actual values

# =============================================
# Database configuration
# =============================================
# PostgreSQL connection settings
# When deploying with Docker Compose, use "db" as the server address
POSTGRES_SERVER=localhost
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=deepaudit

# Full database connection string (optional; overrides the settings above)
# DATABASE_URL=postgresql+asyncpg://postgres:postgres@localhost/deepaudit

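# Illustrative Docker Compose variant (assumes the "db" service name mentioned above and the default credentials):
# POSTGRES_SERVER=db
# DATABASE_URL=postgresql+asyncpg://postgres:postgres@db/deepaudit
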
# =============================================
# Security configuration
# =============================================
# JWT signing key - must be changed to a random string in production!
# Recommended: openssl rand -hex 32
SECRET_KEY=your-super-secret-key-change-this-in-production

# JWT signing algorithm
ALGORITHM=HS256

# Access token lifetime in minutes (11520 minutes = 8 days)
ACCESS_TOKEN_EXPIRE_MINUTES=11520

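# Illustrative shorter-lived token, if 8 days is too long for your deployment (1440 minutes = 1 day):
# ACCESS_TOKEN_EXPIRE_MINUTES=1440
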
# =============================================
# General LLM configuration
# =============================================
# Supported providers:
# - LiteLLM adapters: openai, gemini, claude, qwen, deepseek, zhipu, moonshot, ollama
# - Native adapters: baidu, minimax, doubao
LLM_PROVIDER=openai

# API key
LLM_API_KEY=sk-your-api-key

# Model name (leave empty to use the provider's default model)
# OpenAI: gpt-4o-mini, gpt-4o, gpt-3.5-turbo
# Gemini: gemini-2.0-flash, gemini-1.5-pro
# Claude: claude-3-5-sonnet-20241022, claude-3-haiku-20240307
# Qwen: qwen-turbo, qwen-plus, qwen-max
# DeepSeek: deepseek-chat, deepseek-coder
# Zhipu: glm-4-flash, glm-4
# Moonshot: moonshot-v1-8k, moonshot-v1-32k
# Ollama: llama3, codellama, qwen2.5, deepseek-coder
LLM_MODEL=

# Custom API endpoint (e.g. an API relay/proxy)
# Example: https://your-proxy.com/v1
LLM_BASE_URL=

# Request timeout in seconds
LLM_TIMEOUT=150

# Sampling temperature (0-1; lower values are more deterministic)
LLM_TEMPERATURE=0.7

# Maximum number of generated tokens
LLM_MAX_TOKENS=30000

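# Illustrative end-to-end setup (placeholder values; any provider/model pair listed above is configured the same way):
# LLM_PROVIDER=deepseek
# LLM_API_KEY=sk-xxx
# LLM_MODEL=deepseek-chat
# LLM_BASE_URL=
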
# =============================================
# Per-provider configuration (optional)
# =============================================
# Set these if you need several providers configured at the same time;
# you can switch between them at runtime on the /admin page

# OpenAI
# OPENAI_API_KEY=sk-xxx
# OPENAI_BASE_URL=https://api.openai.com/v1

# Google Gemini
# GEMINI_API_KEY=xxx

# Anthropic Claude
# CLAUDE_API_KEY=sk-ant-xxx

# Alibaba Cloud Qwen (Tongyi Qianwen)
# QWEN_API_KEY=sk-xxx

# DeepSeek
# DEEPSEEK_API_KEY=sk-xxx

# Zhipu AI
# ZHIPU_API_KEY=xxx

# Moonshot AI (Kimi)
# MOONSHOT_API_KEY=sk-xxx

# Baidu ERNIE (format: api_key:secret_key)
# BAIDU_API_KEY=your_api_key:your_secret_key

# MiniMax
# MINIMAX_API_KEY=xxx

# ByteDance Doubao
# DOUBAO_API_KEY=xxx

# Local models via Ollama
# OLLAMA_BASE_URL=http://localhost:11434/v1

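# Illustrative fully local setup via Ollama (assumes Ollama runs on its default port and the model has already been pulled; whether LLM_API_KEY may stay empty depends on the adapter):
# LLM_PROVIDER=ollama
# LLM_MODEL=qwen2.5
# OLLAMA_BASE_URL=http://localhost:11434/v1
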
# =============================================
# Agent audit configuration (required core of Multi-Agent v3.0.0)
# =============================================
# Agent audit switch (must stay enabled; this is a core feature)
AGENT_ENABLED=true

# Maximum number of agent iterations
AGENT_MAX_ITERATIONS=5

# Timeout for a single agent audit run (seconds)
AGENT_TIMEOUT=1800

# Redis configuration (agent task queue - required)
# When deploying with Docker Compose, use "redis" as the server address
REDIS_URL=redis://localhost:6379/0

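# Illustrative Docker Compose variant (assumes the "redis" service name mentioned above):
# REDIS_URL=redis://redis:6379/0
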
# =============================================
# Embedding model configuration (RAG feature, independent of the main LLM)
# =============================================
# Embedding provider: openai, ollama, cohere, huggingface
EMBEDDING_PROVIDER=openai

# Embedding model name
# OpenAI: text-embedding-3-small, text-embedding-3-large, text-embedding-ada-002
# Ollama: nomic-embed-text, mxbai-embed-large
EMBEDDING_MODEL=text-embedding-3-small

# Embedding vector dimension (should match the output size of the model above)
EMBEDDING_DIMENSION=2560

# Embedding API key (leave empty to fall back to LLM_API_KEY)
EMBEDDING_API_KEY=

# Embedding base URL (leave empty to use the provider default)
EMBEDDING_BASE_URL=

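# Illustrative local embedding setup via Ollama (placeholder values; base URL assumed to mirror OLLAMA_BASE_URL above; nomic-embed-text produces 768-dimensional vectors):
# EMBEDDING_PROVIDER=ollama
# EMBEDDING_MODEL=nomic-embed-text
# EMBEDDING_BASE_URL=http://localhost:11434/v1
# EMBEDDING_DIMENSION=768
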
# =============================================
# Vector database configuration (RAG feature)
# =============================================
# Vector database type: chroma
VECTOR_DB_TYPE=chroma

# ChromaDB settings (local mode)
CHROMA_PERSIST_DIRECTORY=./data/chroma

# =============================================
# Sandbox configuration (vulnerability verification - required core)
# =============================================
# Sandbox switch (must stay enabled; the sandbox is the core component for vulnerability verification)
SANDBOX_ENABLED=true

# Sandbox Docker image
# Build option 1: docker compose build sandbox
# Build option 2: cd docker/sandbox && ./build.sh
SANDBOX_IMAGE=deepaudit/sandbox:latest

# Sandbox memory limit
SANDBOX_MEMORY_LIMIT=512m

# Sandbox CPU limit (number of cores)
SANDBOX_CPU_LIMIT=1.0

# Disable networking inside the sandbox (recommended for security)
SANDBOX_NETWORK_DISABLED=true

# Sandbox execution timeout (seconds)
SANDBOX_TIMEOUT=30

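# Illustrative higher-resource sandbox for heavier verification runs (values are only a suggestion):
# SANDBOX_MEMORY_LIMIT=1g
# SANDBOX_CPU_LIMIT=2.0
# SANDBOX_TIMEOUT=60
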
# =============================================
# Git repository configuration
# =============================================
# GitHub Personal Access Token
# Create one at: https://github.com/settings/tokens
# Required scope: repo (private repositories) or public_repo (public repositories)
GITHUB_TOKEN=

# GitLab Personal Access Token
# Create one at: https://gitlab.com/-/profile/personal_access_tokens
# Required scope: read_repository
GITLAB_TOKEN=

# Gitea Access Token
# Create one at: https://[your-gitea-instance]/user/settings/applications
# Required scope: read_repository
GITEA_TOKEN=

# Gitea Webhook Secret
GITEA_WEBHOOK_SECRET=

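# Illustrative token formats (placeholders only; classic GitHub PATs start with "ghp_", GitLab PATs with "glpat-"):
# GITHUB_TOKEN=ghp_xxxxxxxxxxxxxxxxxxxx
# GITLAB_TOKEN=glpat-xxxxxxxxxxxxxxxxxxxx
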
# =============================================
# Scan configuration
# =============================================
# Maximum number of files per scan
MAX_ANALYZE_FILES=50

# Maximum size of a single file in bytes (default 200 KB)
MAX_FILE_SIZE_BYTES=204800

# Number of concurrent LLM requests (watch out for API rate limits)
LLM_CONCURRENCY=3

# Delay between LLM requests in milliseconds, to avoid triggering rate limits
LLM_GAP_MS=2000

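# Illustrative conservative profile for a heavily rate-limited API key (values are only a suggestion):
# LLM_CONCURRENCY=1
# LLM_GAP_MS=5000
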
# =============================================
# Storage configuration
# =============================================
# Directory for uploaded ZIP files
ZIP_STORAGE_PATH=./uploads/zip_files

# =============================================
# Output configuration
# =============================================
# Language of analysis results
# zh-CN: Chinese
# en-US: English
OUTPUT_LANGUAGE=zh-CN

# Gitea integration
# Gitea host URL, used to post replies automatically under pull requests
GITEA_HOST_URL=

# Token for the AI bot account with Issue/PR read-write permissions
GITEA_BOT_TOKEN=
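
# Illustrative Gitea integration (host is a placeholder; the token value comes from your Gitea instance's bot account):
# GITEA_HOST_URL=https://gitea.example.com
# GITEA_BOT_TOKEN=your-gitea-bot-token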