feat(docker): Enhance Docker configuration for multi-provider LLM support

- Update Dockerfile to support dynamic build-time configuration for multiple LLM providers
- Add comprehensive build arguments for various AI service configurations
- Modify docker-compose.yml to pass environment-specific LLM configuration parameters
- Update .dockerignore to exclude README files during Docker build
- Improve environment variable handling for Vite build process
- Expand support for providers like Gemini, OpenAI, Claude, Ollama, and others
- Implement flexible configuration strategy for LLM services during container build
These changes enable more robust, configurable deployments of XCode Reviewer, with broader LLM provider coverage and greater Docker build flexibility.
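For context, the intended compose flow is roughly: place the VITE_* values in a .env file next to docker-compose.yml, then rebuild so the build args reach the Vite build. A minimal sketch (the provider choice and key value are placeholders, not part of this commit):

$ cat > .env <<'EOF'
VITE_LLM_PROVIDER=openai
VITE_OPENAI_API_KEY=your-key-here
EOF
$ docker compose up -d --build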
lintsinghua 2025-10-24 16:40:35 +08:00
parent 70b198d28f
commit 5bbea7101c
3 changed files with 200 additions and 21 deletions

.dockerignore

@@ -38,7 +38,8 @@ coverage
.nyc_output
# Documentation
-*.md
+README.md
+README_EN.md
docs
# Other

Dockerfile

@@ -18,16 +18,141 @@ RUN npm config set registry https://registry.npmjs.org/ && \
npm config delete http-proxy 2>/dev/null || true && \
npm install -g pnpm
+# Declare build arguments - these can be passed in at docker build time
+# Common LLM configuration
+ARG VITE_LLM_PROVIDER
+ARG VITE_LLM_API_KEY
+ARG VITE_LLM_MODEL
+ARG VITE_LLM_BASE_URL
+ARG VITE_LLM_TIMEOUT
+ARG VITE_LLM_TEMPERATURE
+ARG VITE_LLM_MAX_TOKENS
+# Google Gemini configuration
+ARG VITE_GEMINI_API_KEY
+ARG VITE_GEMINI_MODEL
+ARG VITE_GEMINI_TIMEOUT_MS
+# OpenAI configuration
+ARG VITE_OPENAI_API_KEY
+ARG VITE_OPENAI_MODEL
+ARG VITE_OPENAI_BASE_URL
+# Claude configuration
+ARG VITE_CLAUDE_API_KEY
+ARG VITE_CLAUDE_MODEL
+# Tongyi Qianwen (Qwen) configuration
+ARG VITE_QWEN_API_KEY
+ARG VITE_QWEN_MODEL
+# DeepSeek configuration
+ARG VITE_DEEPSEEK_API_KEY
+ARG VITE_DEEPSEEK_MODEL
+# Zhipu AI configuration
+ARG VITE_ZHIPU_API_KEY
+ARG VITE_ZHIPU_MODEL
+# Moonshot configuration
+ARG VITE_MOONSHOT_API_KEY
+ARG VITE_MOONSHOT_MODEL
+# Baidu ERNIE Bot configuration
+ARG VITE_BAIDU_API_KEY
+ARG VITE_BAIDU_MODEL
+# MiniMax configuration
+ARG VITE_MINIMAX_API_KEY
+ARG VITE_MINIMAX_MODEL
+# Doubao configuration
+ARG VITE_DOUBAO_API_KEY
+ARG VITE_DOUBAO_MODEL
+# Ollama configuration
+ARG VITE_OLLAMA_API_KEY
+ARG VITE_OLLAMA_MODEL
+ARG VITE_OLLAMA_BASE_URL
+# Supabase configuration
+ARG VITE_SUPABASE_URL
+ARG VITE_SUPABASE_ANON_KEY
+# GitHub configuration
+ARG VITE_GITHUB_TOKEN
+# Application configuration
+ARG VITE_APP_ID
+ARG VITE_MAX_ANALYZE_FILES
+ARG VITE_LLM_CONCURRENCY
+ARG VITE_LLM_GAP_MS
+# Convert build arguments to environment variables (Vite reads these at build time)
+ENV VITE_LLM_PROVIDER=$VITE_LLM_PROVIDER
+ENV VITE_LLM_API_KEY=$VITE_LLM_API_KEY
+ENV VITE_LLM_MODEL=$VITE_LLM_MODEL
+ENV VITE_LLM_BASE_URL=$VITE_LLM_BASE_URL
+ENV VITE_LLM_TIMEOUT=$VITE_LLM_TIMEOUT
+ENV VITE_LLM_TEMPERATURE=$VITE_LLM_TEMPERATURE
+ENV VITE_LLM_MAX_TOKENS=$VITE_LLM_MAX_TOKENS
+ENV VITE_GEMINI_API_KEY=$VITE_GEMINI_API_KEY
+ENV VITE_GEMINI_MODEL=$VITE_GEMINI_MODEL
+ENV VITE_GEMINI_TIMEOUT_MS=$VITE_GEMINI_TIMEOUT_MS
+ENV VITE_OPENAI_API_KEY=$VITE_OPENAI_API_KEY
+ENV VITE_OPENAI_MODEL=$VITE_OPENAI_MODEL
+ENV VITE_OPENAI_BASE_URL=$VITE_OPENAI_BASE_URL
+ENV VITE_CLAUDE_API_KEY=$VITE_CLAUDE_API_KEY
+ENV VITE_CLAUDE_MODEL=$VITE_CLAUDE_MODEL
+ENV VITE_QWEN_API_KEY=$VITE_QWEN_API_KEY
+ENV VITE_QWEN_MODEL=$VITE_QWEN_MODEL
+ENV VITE_DEEPSEEK_API_KEY=$VITE_DEEPSEEK_API_KEY
+ENV VITE_DEEPSEEK_MODEL=$VITE_DEEPSEEK_MODEL
+ENV VITE_ZHIPU_API_KEY=$VITE_ZHIPU_API_KEY
+ENV VITE_ZHIPU_MODEL=$VITE_ZHIPU_MODEL
+ENV VITE_MOONSHOT_API_KEY=$VITE_MOONSHOT_API_KEY
+ENV VITE_MOONSHOT_MODEL=$VITE_MOONSHOT_MODEL
+ENV VITE_BAIDU_API_KEY=$VITE_BAIDU_API_KEY
+ENV VITE_BAIDU_MODEL=$VITE_BAIDU_MODEL
+ENV VITE_MINIMAX_API_KEY=$VITE_MINIMAX_API_KEY
+ENV VITE_MINIMAX_MODEL=$VITE_MINIMAX_MODEL
+ENV VITE_DOUBAO_API_KEY=$VITE_DOUBAO_API_KEY
+ENV VITE_DOUBAO_MODEL=$VITE_DOUBAO_MODEL
+ENV VITE_OLLAMA_API_KEY=$VITE_OLLAMA_API_KEY
+ENV VITE_OLLAMA_MODEL=$VITE_OLLAMA_MODEL
+ENV VITE_OLLAMA_BASE_URL=$VITE_OLLAMA_BASE_URL
+ENV VITE_SUPABASE_URL=$VITE_SUPABASE_URL
+ENV VITE_SUPABASE_ANON_KEY=$VITE_SUPABASE_ANON_KEY
+ENV VITE_GITHUB_TOKEN=$VITE_GITHUB_TOKEN
+ENV VITE_APP_ID=$VITE_APP_ID
+ENV VITE_MAX_ANALYZE_FILES=$VITE_MAX_ANALYZE_FILES
+ENV VITE_LLM_CONCURRENCY=$VITE_LLM_CONCURRENCY
+ENV VITE_LLM_GAP_MS=$VITE_LLM_GAP_MS
# Copy dependency files
COPY package.json pnpm-lock.yaml ./
# Install dependencies
RUN pnpm install --no-frozen-lockfile
-# Copy project files (including .env)
+# Copy project files (including .env, because we use build arguments)
COPY . .
-# Build the app (environment variables are read at build time)
+# Build the app (environment variables are read by Vite at build time and hard-coded into the bundle)
RUN pnpm build
# Production stage - serve static files with nginx
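Since every setting is declared as an ARG, the image can also be built directly with docker build, bypassing compose. A minimal sketch (the image tag xcodereviewer and the argument values are illustrative assumptions, not part of this commit):

$ docker build \
    --build-arg VITE_LLM_PROVIDER=openai \
    --build-arg VITE_OPENAI_API_KEY=your-key-here \
    --build-arg VITE_OPENAI_MODEL=gpt-4o-mini \
    -t xcodereviewer .

Because Vite inlines these values into the static bundle at build time, switching providers or rotating a key means rebuilding the image, not just restarting the container.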

docker-compose.yml

@@ -4,20 +4,69 @@ services:
build:
context: .
dockerfile: Dockerfile
-container_name: xcodereviewer-app
-ports:
-- "5174:80"
-environment:
-# Google Gemini AI configuration (required)
+# Build arguments - passed in from the .env file or environment variables
+args:
+# Common LLM configuration
+- VITE_LLM_PROVIDER=${VITE_LLM_PROVIDER:-gemini}
+- VITE_LLM_API_KEY=${VITE_LLM_API_KEY}
+- VITE_LLM_MODEL=${VITE_LLM_MODEL}
+- VITE_LLM_BASE_URL=${VITE_LLM_BASE_URL}
+- VITE_LLM_TIMEOUT=${VITE_LLM_TIMEOUT:-150000}
+- VITE_LLM_TEMPERATURE=${VITE_LLM_TEMPERATURE:-0.2}
+- VITE_LLM_MAX_TOKENS=${VITE_LLM_MAX_TOKENS:-4096}
+# Google Gemini configuration
- VITE_GEMINI_API_KEY=${VITE_GEMINI_API_KEY}
- VITE_GEMINI_MODEL=${VITE_GEMINI_MODEL:-gemini-2.5-flash}
- VITE_GEMINI_TIMEOUT_MS=${VITE_GEMINI_TIMEOUT_MS:-25000}
-# Supabase configuration (optional)
+# OpenAI configuration
+- VITE_OPENAI_API_KEY=${VITE_OPENAI_API_KEY}
+- VITE_OPENAI_MODEL=${VITE_OPENAI_MODEL:-gpt-4o-mini}
+- VITE_OPENAI_BASE_URL=${VITE_OPENAI_BASE_URL}
+# Claude configuration
+- VITE_CLAUDE_API_KEY=${VITE_CLAUDE_API_KEY}
+- VITE_CLAUDE_MODEL=${VITE_CLAUDE_MODEL:-claude-3-5-sonnet-20241022}
+# Tongyi Qianwen (Qwen) configuration
+- VITE_QWEN_API_KEY=${VITE_QWEN_API_KEY}
+- VITE_QWEN_MODEL=${VITE_QWEN_MODEL:-qwen-turbo}
+# DeepSeek configuration
+- VITE_DEEPSEEK_API_KEY=${VITE_DEEPSEEK_API_KEY}
+- VITE_DEEPSEEK_MODEL=${VITE_DEEPSEEK_MODEL:-deepseek-chat}
+# Zhipu AI configuration
+- VITE_ZHIPU_API_KEY=${VITE_ZHIPU_API_KEY}
+- VITE_ZHIPU_MODEL=${VITE_ZHIPU_MODEL:-glm-4-flash}
+# Moonshot configuration
+- VITE_MOONSHOT_API_KEY=${VITE_MOONSHOT_API_KEY}
+- VITE_MOONSHOT_MODEL=${VITE_MOONSHOT_MODEL:-moonshot-v1-8k}
+# Baidu ERNIE Bot configuration
+- VITE_BAIDU_API_KEY=${VITE_BAIDU_API_KEY}
+- VITE_BAIDU_MODEL=${VITE_BAIDU_MODEL:-ERNIE-3.5-8K}
+# MiniMax configuration
+- VITE_MINIMAX_API_KEY=${VITE_MINIMAX_API_KEY}
+- VITE_MINIMAX_MODEL=${VITE_MINIMAX_MODEL:-abab6.5-chat}
+# Doubao configuration
+- VITE_DOUBAO_API_KEY=${VITE_DOUBAO_API_KEY}
+- VITE_DOUBAO_MODEL=${VITE_DOUBAO_MODEL:-doubao-pro-32k}
+# Ollama configuration
+- VITE_OLLAMA_API_KEY=${VITE_OLLAMA_API_KEY:-ollama}
+- VITE_OLLAMA_MODEL=${VITE_OLLAMA_MODEL:-llama3}
+- VITE_OLLAMA_BASE_URL=${VITE_OLLAMA_BASE_URL:-http://localhost:11434/v1}
+# Supabase configuration
- VITE_SUPABASE_URL=${VITE_SUPABASE_URL}
- VITE_SUPABASE_ANON_KEY=${VITE_SUPABASE_ANON_KEY}
-# GitHub integration (optional)
+# GitHub configuration
- VITE_GITHUB_TOKEN=${VITE_GITHUB_TOKEN}
# Application configuration
@@ -25,6 +74,10 @@ services:
- VITE_MAX_ANALYZE_FILES=${VITE_MAX_ANALYZE_FILES:-40}
- VITE_LLM_CONCURRENCY=${VITE_LLM_CONCURRENCY:-2}
- VITE_LLM_GAP_MS=${VITE_LLM_GAP_MS:-500}
+container_name: xcodereviewer-app
+ports:
+- "5174:80"
restart: unless-stopped
networks:
- xcodereviewer-network
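Because compose substitutes ${VAR:-default} from the shell environment or the .env file, individual defaults above can be overridden per deployment without editing the file. An illustrative one-off override (provider and key are placeholders, not part of this commit):

$ VITE_LLM_PROVIDER=deepseek VITE_DEEPSEEK_API_KEY=your-key-here docker compose up -d --build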