🏗️ Project refactoring: modular cleanup complete

This commit is contained in:
llama-research
2025-09-01 12:29:27 +00:00
parent ef7657101a
commit f9856c31e5
349 changed files with 41438 additions and 254 deletions

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python3
"""
API健康检查模块
用于测试与外部服务的连接如OpenRouter和RapidAPI。
"""
import os
import requests
import sys
from pathlib import Path
# Add the project root to the Python path so the config module can be imported
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))
from config.settings import get_openrouter_key, get_rapidapi_key
def test_openrouter_api() -> bool:
    """
    Test connectivity and authentication against the OpenRouter API.
    """
    api_key = get_openrouter_key()
    if not api_key:
        print("❌ OpenRouter API Key not found.")
        return False
    url = "https://openrouter.ai/api/v1/models"
    headers = {"Authorization": f"Bearer {api_key}"}
    try:
        response = requests.get(url, headers=headers, timeout=10)
        if response.status_code == 200:
            print("✅ OpenRouter API connection successful.")
            return True
        else:
            print(f"❌ OpenRouter API connection failed. Status: {response.status_code}, Response: {response.text[:100]}")
            return False
    except requests.RequestException as e:
        print(f"❌ OpenRouter API request failed: {e}")
        return False
def test_rapidapi_connection() -> bool:
    """
    Test connectivity and authentication against RapidAPI.
    A simple, free API endpoint is used for the check.
    """
    api_key = get_rapidapi_key()
    if not api_key:
        print("❌ RapidAPI Key not found.")
        return False
    # Use a generic, commonly available RapidAPI endpoint for the test
    url = "https://alpha-vantage.p.rapidapi.com/query"
    querystring = {"function": "TOP_GAINERS_LOSERS"}
    headers = {
        "x-rapidapi-host": "alpha-vantage.p.rapidapi.com",
        "x-rapidapi-key": api_key
    }
    try:
        response = requests.get(url, headers=headers, params=querystring, timeout=15)
        # Alpha Vantage's free tier may return an error, but as long as the RapidAPI
        # authentication succeeds the status code will not be 401 or 403
        if response.status_code not in [401, 403]:
            print(f"✅ RapidAPI connection successful (Status: {response.status_code}).")
            return True
        else:
            print(f"❌ RapidAPI authentication failed. Status: {response.status_code}, Response: {response.text[:100]}")
            return False
    except requests.RequestException as e:
        print(f"❌ RapidAPI request failed: {e}")
        return False

if __name__ == "__main__":
    print("🩺 Running API Health Checks...")
    test_openrouter_api()
    test_rapidapi_connection()
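    # (Suggested extension, not part of the original file) If this check should
    # gate a CI job, the two results could be combined into an exit code, e.g.:
    #   ok = test_openrouter_api() and test_rapidapi_connection()
    #   sys.exit(0 if ok else 1)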

View File

@@ -0,0 +1,33 @@
#!/bin/bash
# Environment status check script
echo "📊 Environment status check"
echo "=================="
# Git status
echo "Git status:"
git status --short
echo ""
# Remote repositories
echo "Remote repositories:"
git remote -v
echo ""
# Branches
echo "Branches:"
git branch -a
echo ""
# Latest tags
echo "Latest tags:"
git tag --sort=-version:refname | head -5
echo ""
# Recent commits
echo "Recent commits:"
git log --oneline -5

View File

@@ -0,0 +1,35 @@
#!/bin/bash
# Quick release script
VERSION=$1
ENV=$2
if [ -z "$VERSION" ] || [ -z "$ENV" ]; then
    echo "Usage: ./quick-release.sh <version> <environment>"
    echo "Environment options: canary/dev/beta"
    exit 1
fi
case $ENV in
    canary)
        git checkout main
        git tag "v${VERSION}-canary"
        git push canary main --tags
        ;;
    dev)
        git checkout main
        git tag "v${VERSION}-dev"
        git push dev main --tags
        ;;
    beta)
        git checkout main
        git tag "v${VERSION}-beta"
        git push beta main --tags
        ;;
    *)
        echo "Invalid environment option. Use: canary/dev/beta"
        exit 1
        ;;
esac
echo "✅ Release complete: v${VERSION}-${ENV}"

View File

@@ -0,0 +1,35 @@
#!/bin/bash
# Quick rollback script
ENV=$1
VERSION=$2
if [ -z "$ENV" ] || [ -z "$VERSION" ]; then
    echo "Usage: ./rollback.sh <environment> <version>"
    echo "Environment options: canary/dev/beta"
    exit 1
fi
case $ENV in
    canary)
        git checkout main
        git reset --hard "v${VERSION}-canary"
        git push canary main --force
        ;;
    dev)
        git checkout main
        git reset --hard "v${VERSION}-dev"
        git push dev main --force
        ;;
    beta)
        git checkout main
        git reset --hard "v${VERSION}-beta"
        git push beta main --force
        ;;
    *)
        echo "Invalid environment option. Use: canary/dev/beta"
        exit 1
        ;;
esac
echo "✅ Rollback complete: ${ENV} -> v${VERSION}"

View File

@@ -0,0 +1,229 @@
#!/bin/bash
# 六壬神鉴 progressive release environment setup script
set -e
echo "🚀 Setting up the progressive release environment..."
# 1. Configure Git aliases to simplify day-to-day operations
echo "Configuring Git aliases..."
git config alias.deploy-staging '!git push staging staging:main'
git config alias.deploy-prod '!git push origin main'
git config alias.sync-all '!git fetch --all && git push --all'
git config alias.release-start '!f() { git checkout develop && git pull && git checkout -b "release/$1"; }; f'
git config alias.release-finish '!git checkout main && git merge staging && git tag -a'
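# (Illustrative sketch of how the aliases above might be used in a release cycle)
#   git release-start 1.2.0    # branch release/1.2.0 off an updated develop
#   git deploy-staging         # push the staging branch to the staging remote's main
#   git deploy-prod            # push main to origin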
# 2. Create release branches
echo "Creating release branches..."
git checkout -b staging 2>/dev/null || git checkout staging
git checkout -b develop 2>/dev/null || git checkout develop
# 3. Push the branches to every remote
echo "Pushing branches to all remotes..."
git push origin staging:staging 2>/dev/null || true
git push origin develop:develop 2>/dev/null || true
git push staging staging:main 2>/dev/null || true
git push staging develop:develop 2>/dev/null || true
# 4. Set up branch protection (requires admin privileges)
echo "Setting up branch protection rules..."
echo "⚠️ Please configure the following branch protection manually in the GitHub/GitLab/Gitea admin UI:"
echo "- main branch: require PR review, forbid direct pushes"
echo "- staging branch: require PR review, forbid direct pushes"
echo "- develop branch: require PR review, forbid direct pushes"
# 5. Create the release tag template
echo "Creating the release tag template..."
cat > .gitmessage.txt << 'EOF'
# Release tag template
# Format: v<major>.<minor>.<patch>-<environment>
#
# Examples:
# v1.2.0-canary (canary release)
# v1.2.0 (production release)
# v1.2.1-hotfix (hotfix)
#
# Environment suffixes:
# -canary: canary release
# -staging: pre-release testing
# -hotfix: emergency fix
# no suffix: production release
Release type: [feature/bugfix/hotfix/docs]
Scope: [core/api/ui/config]
Test status: [passed/failed/pending]
Rollback plan: [prepared/not required]
EOF
git config commit.template .gitmessage.txt
# 6. Create the quick release script
cat > scripts/quick-release.sh << 'EOF'
#!/bin/bash
# Quick release script
VERSION=$1
ENV=$2
if [ -z "$VERSION" ] || [ -z "$ENV" ]; then
    echo "Usage: ./quick-release.sh <version> <environment>"
    echo "Environment options: dev/staging/prod"
    exit 1
fi
case $ENV in
    dev)
        git checkout develop
        git tag "v${VERSION}-dev"
        git push gitea develop --tags
        ;;
    staging)
        git checkout staging
        git tag "v${VERSION}-staging"
        git push staging staging:main --tags
        ;;
    prod)
        git checkout main
        git tag "v${VERSION}"
        git push origin main --tags
        ;;
    *)
        echo "Invalid environment option"
        exit 1
        ;;
esac
echo "✅ Release complete: v${VERSION}-${ENV}"
EOF
chmod +x scripts/quick-release.sh
# 7. Create the rollback script
cat > scripts/rollback.sh << 'EOF'
#!/bin/bash
# Quick rollback script
ENV=$1
VERSION=$2
if [ -z "$ENV" ] || [ -z "$VERSION" ]; then
    echo "Usage: ./rollback.sh <environment> <version>"
    echo "Environment options: staging/prod"
    exit 1
fi
case $ENV in
    staging)
        git checkout staging
        git reset --hard "v${VERSION}-staging"
        git push staging staging:main --force
        ;;
    prod)
        git checkout main
        git reset --hard "v${VERSION}"
        git push origin main --force
        ;;
    *)
        echo "Invalid environment option"
        exit 1
        ;;
esac
echo "✅ Rollback complete: ${ENV} -> v${VERSION}"
EOF
chmod +x scripts/rollback.sh
# 8. Create the status check script
cat > scripts/check-status.sh << 'EOF'
#!/bin/bash
# Environment status check script
echo "📊 Environment status check"
echo "=================="
# Git status
echo "Git status:"
git status --short
echo ""
# Remote repositories
echo "Remote repositories:"
git remote -v
echo ""
# Branches
echo "Branches:"
git branch -a
echo ""
# Latest tags
echo "Latest tags:"
git tag --sort=-version:refname | head -5
echo ""
# Recent commits
echo "Recent commits:"
git log --oneline -5
EOF
chmod +x scripts/check-status.sh
# 9. Create the GitHub Actions workflow directory
mkdir -p .github/workflows
# 10. Create the deployment validation workflow
echo "Creating the deployment validation workflow..."
cat > .github/workflows/deploy-validation.yml << 'EOF'
name: Deploy Validation
on:
  push:
    branches: [develop, staging, main]
jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Run tests
        run: |
          python -m pytest tests/ -v
      - name: Validate code style
        run: |
          pip install black flake8
          black --check .
          flake8 .
      - name: Security scan
        run: |
          pip install safety bandit
          safety check
          bandit -r . -f json -o security-report.json
EOF
echo "✅ 渐进发布环境配置完成!"
echo ""
echo "📋 使用指南:"
echo "1. 查看状态: ./scripts/check-status.sh"
echo "2. 快速发布: ./scripts/quick-release.sh 1.0.0 staging"
echo "3. 紧急回滚: ./scripts/rollback.sh prod 1.0.0"
echo "4. Git 别名: git deploy-staging, git deploy-prod"
echo ""
echo "📚 详细文档: docs/development/GRADUAL_DEPLOYMENT_PLAN.md"

View File

@@ -0,0 +1,54 @@
#!/bin/bash
# 炼妖壶 (Lianyaohu) - virtual environment setup script
# Quickly initializes the project development environment
set -e  # Exit on error
echo "🔧 Setting up the Lianyaohu project virtual environment..."
# Check the Python version
echo "📋 Checking Python version..."
python3 --version
# Create the virtual environment (if it does not exist)
if [ ! -d "venv" ]; then
    echo "🏗️ Creating virtual environment..."
    python3 -m venv venv
else
    echo "✅ Virtual environment already exists"
fi
# Activate the virtual environment
echo "🚀 Activating virtual environment..."
source venv/bin/activate
# Upgrade pip
echo "⬆️ Upgrading pip..."
pip install --upgrade pip
# Install project dependencies
echo "📦 Installing project dependencies..."
pip install -r requirements.txt
# Check key dependencies
echo "🔍 Checking key dependencies..."
echo "  - streamlit: $(pip show streamlit | grep Version || echo 'not installed')"
echo "  - openai: $(pip show openai | grep Version || echo 'not installed')"
echo "  - google-cloud-aiplatform: $(pip show google-cloud-aiplatform | grep Version || echo 'not installed')"
echo "  - aiohttp: $(pip show aiohttp | grep Version || echo 'not installed')"
echo "✨ Virtual environment setup complete!"
echo ""
echo "📝 Usage:"
echo "  1. Activate the virtual environment: source venv/bin/activate"
echo "  2. Run the debate system: python examples/debates/baxian_adk_gemini_debate.py"
echo "  3. Start the web UI: streamlit run app.py (if present)"
echo "  4. Leave the virtual environment: deactivate"
echo ""
echo "🔧 Environment variables:"
echo "  Make sure the required API keys are configured in the .env file:"
echo "  - GOOGLE_API_KEY (Google Gemini API)"
echo "  - GOOGLE_CLOUD_PROJECT_ID (GCP project ID)"
echo "  - GOOGLE_CLOUD_LOCATION (GCP region)"
echo ""
echo "🎉 All set! Enjoy your AI debate journey!"

View File

@@ -0,0 +1,68 @@
#!/bin/bash
# Memory Bank web UI launch script
# Sets up the environment and starts the Streamlit app
echo "🧠 Starting the Memory Bank web UI..."
echo "================================"
# Check that we are in the right directory
if [ ! -f "memory_bank_web_interface.py" ]; then
    echo "❌ Error: memory_bank_web_interface.py not found"
    echo "Please run this script from the project directory"
    exit 1
fi
# Check the virtual environment
if [ ! -d "venv" ]; then
    echo "📦 Creating virtual environment..."
    python3 -m venv venv
fi
# Activate the virtual environment
echo "🔧 Activating virtual environment..."
source venv/bin/activate
# Check and install dependencies
echo "📋 Checking dependencies..."
# Check streamlit
if ! python -c "import streamlit" 2>/dev/null; then
    echo "📦 Installing Streamlit..."
    pip install streamlit
fi
# Check the Google Cloud dependencies
if ! python -c "import google.cloud" 2>/dev/null; then
    echo "📦 Installing Google Cloud dependencies..."
    pip install google-cloud-aiplatform google-generativeai
fi
# asyncio ships with the Python 3 standard library, so no separate install is needed
# Check Google Cloud authentication
echo "🔐 Checking Google Cloud authentication..."
if ! gcloud auth application-default print-access-token >/dev/null 2>&1; then
    echo "⚠️ No Google Cloud credentials detected"
    echo "Starting the authentication flow..."
    gcloud auth application-default login
fi
# Set environment variables
export GOOGLE_CLOUD_PROJECT="inner-radius-469712-e9"
export GOOGLE_CLOUD_REGION="us-central1"
# Start the Streamlit app
echo "🚀 Starting the web UI..."
echo "================================"
echo "📱 The web UI will open in your browser"
echo "🌐 Default address: http://localhost:8501"
echo "⏹️ Press Ctrl+C to stop the server"
echo "================================"
# Launch streamlit
streamlit run memory_bank_web_interface.py --server.port 8501 --server.address localhost

View File

@@ -0,0 +1,137 @@
import glob
import frontmatter
import datetime
import argparse
from pathlib import Path
# --- Configuration ---
# Directories to exclude from scanning
EXCLUDE_DIRS = ['venv', 'node_modules', '.git']
# Default metadata template for the --fix option
DEFAULT_METADATA_TEMPLATE = {
    'title': "Default Title",
    'status': "spring",
    'owner': "TBD",
    'created': datetime.date.today().strftime('%Y-%m-%d'),
    'review_by': (datetime.date.today() + datetime.timedelta(days=180)).strftime('%Y-%m-%d'),
    'tags': ["untagged"]
}
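# (Illustrative example, not taken from the repository) A document that passes
# validation would start with front matter along these lines:
#   ---
#   title: "Gradual Deployment Plan"
#   status: summer        # one of: spring / summer / autumn / winter
#   owner: platform-team
#   created: 2025-09-01
#   review_by: 2026-03-01
#   tags: [deployment]
#   ---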
def get_project_files(project_root):
    """Get all markdown files, respecting exclusions."""
    all_files = project_root.rglob('*.md')
    filtered_files = []
    for file_path in all_files:
        if not any(excluded_dir in file_path.parts for excluded_dir in EXCLUDE_DIRS):
            filtered_files.append(str(file_path))
    return filtered_files
def add_default_frontmatter(file_path):
    """Adds a default YAML front matter block to a file that lacks one."""
    try:
        with open(file_path, 'r+', encoding='utf-8') as f:
            content = f.read()
            f.seek(0, 0)
            # Create a new post object with default metadata and existing content
            new_post = frontmatter.Post(content, **DEFAULT_METADATA_TEMPLATE)
            # Write the serialized post (metadata + content) back to the file
            f.write(frontmatter.dumps(new_post))
        print(f"[FIXED] {file_path}: Added default front matter.")
        return True
    except Exception as e:
        print(f"[CRITICAL] {file_path}: Could not apply fix. Error: {e}")
        return False
def validate_doc_lifecycle(fix_missing=False):
    """
    Scans and validates markdown files, with an option to fix files missing front matter.
    """
    project_root = Path(__file__).parent.parent
    markdown_files = get_project_files(project_root)
    print(f"Scanning {len(markdown_files)} Markdown files (vendor directories excluded)...")
    all_docs = []
    errors = []
    warnings = []
    fixed_count = 0
    for md_file in markdown_files:
        try:
            post = frontmatter.load(md_file)
            metadata = post.metadata
            if not metadata:
                if fix_missing:
                    if add_default_frontmatter(md_file):
                        fixed_count += 1
                else:
                    warnings.append(f"[SKIPPED] {md_file}: No YAML front matter found. Use --fix to add a template.")
                continue
            doc_info = {'path': md_file}
            required_fields = ['title', 'status', 'owner', 'created', 'review_by']
            missing_fields = [field for field in required_fields if field not in metadata]
            if missing_fields:
                errors.append(f"[ERROR] {md_file}: Missing required fields: {', '.join(missing_fields)}")
                continue
            doc_info.update(metadata)
            allowed_statuses = ['spring', 'summer', 'autumn', 'winter']
            if metadata.get('status') not in allowed_statuses:
                errors.append(f"[ERROR] {md_file}: Invalid status '{metadata.get('status')}'. Must be one of {allowed_statuses}")
            review_by_date = metadata.get('review_by')
            if review_by_date:
                if isinstance(review_by_date, str):
                    review_by_date = datetime.datetime.strptime(review_by_date, '%Y-%m-%d').date()
                if review_by_date < datetime.date.today():
                    warnings.append(f"[WARNING] {md_file}: Review date ({review_by_date}) has passed.")
            all_docs.append(doc_info)
        except Exception as e:
            errors.append(f"[CRITICAL] {md_file}: Could not parse file. Error: {e}")
    print("\n--- Validation Report ---")
    if not errors and not warnings:
        print("✅ All documents with front matter are valid and up-to-date.")
    if warnings:
        print("\n⚠️ Warnings:")
        for warning in warnings:
            print(warning)
    if errors:
        print("\n❌ Errors:")
        for error in errors:
            print(error)
    print("\n--- Summary ---")
    print(f"Total files scanned: {len(markdown_files)}")
    print(f"Files with valid front matter: {len(all_docs)}")
    if fix_missing:
        print(f"Files automatically fixed: {fixed_count}")
    print(f"Warnings: {len(warnings)}")
    print(f"Errors: {len(errors)}")
    return len(errors) == 0
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Validate and manage the lifecycle of Markdown documents.")
    parser.add_argument(
        '--fix',
        action='store_true',
        help="Automatically add a default front matter template to any document that is missing one."
    )
    args = parser.parse_args()
    is_valid = validate_doc_lifecycle(fix_missing=args.fix)
    if not is_valid:
        exit(1)
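# (Usage note; the script path is illustrative since the file name is not shown here)
#   python scripts/validate_docs.py          # report only
#   python scripts/validate_docs.py --fix    # also add template front matter where missing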