refactor(project): restructure project documentation and clean up code structure
- Remove the old documentation structure and content; clean up Markdown files in the root directory
- Remove the GitHub Pages deployment configuration and related files
- Remove the .env.example file; environment variables are now managed with Doppler
- Update README.md to add support for OpenBB data
- Refactor streamlit_app.py, removing the Swarm-mode code
- Update the Doppler configuration module to also support .env files
- Delete the Memory Bank experiments and test scripts
- Clean up internal documentation and development plans
scripts/api_health_check.py (new file)
@@ -0,0 +1,76 @@
#!/usr/bin/env python3
"""
API health-check module.

Tests connectivity to external services such as OpenRouter and RapidAPI.
"""

import sys
from pathlib import Path

import requests

# Add the project root to the Python path so the config module can be imported
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

from config.doppler_config import get_openrouter_key, get_rapidapi_key


def test_openrouter_api() -> bool:
    """Test the connection to and authentication with the OpenRouter API."""
    api_key = get_openrouter_key()
    if not api_key:
        print("❌ OpenRouter API Key not found.")
        return False

    url = "https://openrouter.ai/api/v1/models"
    headers = {"Authorization": f"Bearer {api_key}"}

    try:
        response = requests.get(url, headers=headers, timeout=10)
        if response.status_code == 200:
            print("✅ OpenRouter API connection successful.")
            return True
        else:
            print(f"❌ OpenRouter API connection failed. Status: {response.status_code}, Response: {response.text[:100]}")
            return False
    except requests.RequestException as e:
        print(f"❌ OpenRouter API request failed: {e}")
        return False


def test_rapidapi_connection() -> bool:
    """
    Test the connection to and authentication with RapidAPI.

    A simple, free API endpoint is used for the test.
    """
    api_key = get_rapidapi_key()
    if not api_key:
        print("❌ RapidAPI Key not found.")
        return False

    # Use a generic, commonly available RapidAPI endpoint for the test
    url = "https://alpha-vantage.p.rapidapi.com/query"
    querystring = {"function": "TOP_GAINERS_LOSERS"}
    headers = {
        "x-rapidapi-host": "alpha-vantage.p.rapidapi.com",
        "x-rapidapi-key": api_key,
    }

    try:
        response = requests.get(url, headers=headers, params=querystring, timeout=15)
        # Alpha Vantage's free tier may return an error, but as long as RapidAPI
        # authentication succeeds, the status code will not be 401 or 403
        if response.status_code not in [401, 403]:
            print(f"✅ RapidAPI connection successful (Status: {response.status_code}).")
            return True
        else:
            print(f"❌ RapidAPI authentication failed. Status: {response.status_code}, Response: {response.text[:100]}")
            return False
    except requests.RequestException as e:
        print(f"❌ RapidAPI request failed: {e}")
        return False


if __name__ == "__main__":
    print("🩺 Running API Health Checks...")
    openrouter_ok = test_openrouter_api()
    rapidapi_ok = test_rapidapi_connection()
    # Exit non-zero if either check failed, so the script can gate CI pipelines
    sys.exit(0 if (openrouter_ok and rapidapi_ok) else 1)
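Both helpers imported above come from config.doppler_config, which is not part of this diff. Below is a minimal, illustrative sketch of that module, assuming Doppler injects secrets as environment variables and that the .env fallback mentioned in the commit message is handled with python-dotenv; the variable names OPENROUTER_API_KEY and RAPIDAPI_KEY are assumptions, not confirmed by this commit.

# config/doppler_config.py -- illustrative sketch only; not part of this commit
import os
from typing import Optional

from dotenv import load_dotenv  # assumed .env fallback; doppler run normally injects the vars

load_dotenv()  # harmless no-op when no .env file exists (e.g. under doppler run)


def get_openrouter_key() -> Optional[str]:
    # Assumed variable name; adjust to match the actual Doppler project config
    return os.environ.get("OPENROUTER_API_KEY")


def get_rapidapi_key() -> Optional[str]:
    # Assumed variable name; adjust to match the actual Doppler project config
    return os.environ.get("RAPIDAPI_KEY")

With Doppler managing the secrets, the health check would typically be invoked as: doppler run -- python scripts/api_health_check.py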
scripts/validate_doc_lifecycle.py (new file)
@@ -0,0 +1,137 @@
import argparse
import datetime
import sys
from pathlib import Path

import frontmatter

# --- Configuration ---
# Directories to exclude from scanning
EXCLUDE_DIRS = ['venv', 'node_modules', '.git']
# Default metadata template for the --fix option
DEFAULT_METADATA_TEMPLATE = {
    'title': "Default Title",
    'status': "spring",
    'owner': "TBD",
    'created': datetime.date.today().strftime('%Y-%m-%d'),
    'review_by': (datetime.date.today() + datetime.timedelta(days=180)).strftime('%Y-%m-%d'),
    'tags': ["untagged"]
}


def get_project_files(project_root):
    """Get all markdown files, respecting exclusions."""
    all_files = project_root.rglob('*.md')
    filtered_files = []
    for file_path in all_files:
        if not any(excluded_dir in file_path.parts for excluded_dir in EXCLUDE_DIRS):
            filtered_files.append(str(file_path))
    return filtered_files


def add_default_frontmatter(file_path):
    """Adds a default YAML front matter block to a file that lacks one."""
    try:
        with open(file_path, 'r+', encoding='utf-8') as f:
            content = f.read()
            f.seek(0, 0)

            # Create a new post object with default metadata and existing content
            new_post = frontmatter.Post(content, **DEFAULT_METADATA_TEMPLATE)

            # Write the serialized post (metadata + content) back to the file
            f.write(frontmatter.dumps(new_post))
        print(f"[FIXED] {file_path}: Added default front matter.")
        return True
    except Exception as e:
        print(f"[CRITICAL] {file_path}: Could not apply fix. Error: {e}")
        return False


def validate_doc_lifecycle(fix_missing=False):
    """
    Scans and validates markdown files, with an option to fix files missing front matter.
    """
    project_root = Path(__file__).parent.parent
    markdown_files = get_project_files(project_root)

    print(f"Scanning {len(markdown_files)} Markdown files (vendor directories excluded)...")

    all_docs = []
    errors = []
    warnings = []
    fixed_count = 0

    for md_file in markdown_files:
        try:
            post = frontmatter.load(md_file)
            metadata = post.metadata

            if not metadata:
                if fix_missing:
                    if add_default_frontmatter(md_file):
                        fixed_count += 1
                else:
                    warnings.append(f"[SKIPPED] {md_file}: No YAML front matter found. Use --fix to add a template.")
                continue

            doc_info = {'path': md_file}

            required_fields = ['title', 'status', 'owner', 'created', 'review_by']
            missing_fields = [field for field in required_fields if field not in metadata]
            if missing_fields:
                errors.append(f"[ERROR] {md_file}: Missing required fields: {', '.join(missing_fields)}")
                continue

            doc_info.update(metadata)

            allowed_statuses = ['spring', 'summer', 'autumn', 'winter']
            if metadata.get('status') not in allowed_statuses:
                errors.append(f"[ERROR] {md_file}: Invalid status '{metadata.get('status')}'. Must be one of {allowed_statuses}")

            review_by_date = metadata.get('review_by')
            if review_by_date:
                if isinstance(review_by_date, str):
                    review_by_date = datetime.datetime.strptime(review_by_date, '%Y-%m-%d').date()

                if review_by_date < datetime.date.today():
                    warnings.append(f"[WARNING] {md_file}: Review date ({review_by_date}) has passed.")

            all_docs.append(doc_info)

        except Exception as e:
            errors.append(f"[CRITICAL] {md_file}: Could not parse file. Error: {e}")

    print("\n--- Validation Report ---")

    if not errors and not warnings:
        print("✅ All documents with front matter are valid and up-to-date.")

    if warnings:
        print("\n⚠️ Warnings:")
        for warning in warnings:
            print(warning)

    if errors:
        print("\n❌ Errors:")
        for error in errors:
            print(error)

    print("\n--- Summary ---")
    print(f"Total files scanned: {len(markdown_files)}")
    print(f"Files with valid front matter: {len(all_docs)}")
    if fix_missing:
        print(f"Files automatically fixed: {fixed_count}")
    print(f"Warnings: {len(warnings)}")
    print(f"Errors: {len(errors)}")

    return len(errors) == 0


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Validate and manage the lifecycle of Markdown documents.")
    parser.add_argument(
        '--fix',
        action='store_true',
        help="Automatically add a default front matter template to any document that is missing one."
    )
    args = parser.parse_args()

    is_valid = validate_doc_lifecycle(fix_missing=args.fix)
    if not is_valid:
        sys.exit(1)
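For reference, a document passes this validator when it carries all five required fields, a status drawn from the seasonal lifecycle (spring, summer, autumn, winter), and dates in YYYY-MM-DD format. A front matter block with purely illustrative values:

---
title: Data Pipeline Overview
status: summer
owner: data-team
created: 2025-01-15
review_by: 2025-07-14
tags: [pipeline]
---

Run python scripts/validate_doc_lifecycle.py for a report, or add --fix to stamp DEFAULT_METADATA_TEMPLATE into documents that have no front matter at all; note that files with partial front matter are reported as errors rather than fixed.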