mirror of https://github.com/fawney19/Aether.git
synced 2026-01-05 17:22:28 +08:00

Compare commits: 15a9b88fc8 ... v0.1.22

4 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 03ad16ea8a | |
| | 2fa64b98e3 | |
| | 75d7e89cbb | |
| | d73a443484 | |
```diff
@@ -105,7 +105,7 @@ RUN printf '%s\n' \
     'stderr_logfile=/var/log/nginx/error.log' \
     '' \
     '[program:app]' \
-    'command=gunicorn src.main:app -w %(ENV_GUNICORN_WORKERS)s -k uvicorn.workers.UvicornWorker --bind 0.0.0.0:%(ENV_PORT)s --timeout 120 --access-logfile - --error-logfile - --log-level info' \
+    'command=gunicorn src.main:app --preload -w %(ENV_GUNICORN_WORKERS)s -k uvicorn.workers.UvicornWorker --bind 0.0.0.0:%(ENV_PORT)s --timeout 120 --access-logfile - --error-logfile - --log-level info' \
    'directory=/app' \
    'autostart=true' \
    'autorestart=true' \
```
```diff
@@ -106,7 +106,7 @@ RUN printf '%s\n' \
     'stderr_logfile=/var/log/nginx/error.log' \
     '' \
     '[program:app]' \
-    'command=gunicorn src.main:app -w %(ENV_GUNICORN_WORKERS)s -k uvicorn.workers.UvicornWorker --bind 0.0.0.0:%(ENV_PORT)s --timeout 120 --access-logfile - --error-logfile - --log-level info' \
+    'command=gunicorn src.main:app --preload -w %(ENV_GUNICORN_WORKERS)s -k uvicorn.workers.UvicornWorker --bind 0.0.0.0:%(ENV_PORT)s --timeout 120 --access-logfile - --error-logfile - --log-level info' \
    'directory=/app' \
    'autostart=true' \
    'autorestart=true' \
```
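Both hunks apply the same one-flag change, presumably one per Dockerfile variant. With `--preload`, gunicorn imports the application once in the master process before forking, so import-time work runs once and the workers share copy-on-write pages; without it, every worker imports the app independently. A minimal sketch of what moves to the master, using a hypothetical module (none of these names are from the repository):

```python
# demo.py — hypothetical module to illustrate --preload; not from this repo.
# `gunicorn demo:app --preload -w 4 -k uvicorn.workers.UvicornWorker` runs the
# import-time print below once, in the master, before forking; without
# --preload each of the 4 workers imports the module and prints it again.
print("importing application module")


async def app(scope, receive, send):
    # Minimal ASGI app so the module actually serves requests under uvicorn.
    if scope["type"] != "http":  # ignore lifespan probes
        return
    await send({
        "type": "http.response.start",
        "status": 200,
        "headers": [(b"content-type", b"text/plain")],
    })
    await send({"type": "http.response.body", "body": b"ok"})
```

The usual trade-off: anything opened at import time (database connections, file handles) is shared across the forks, so connection pools generally need to be created per worker, e.g. in a gunicorn `post_fork` hook.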
```diff
@@ -394,6 +394,10 @@ def upgrade() -> None:
             index=True,
         ),
     )
+    # Composite indexes for the usage table (optimize common queries)
+    op.create_index("idx_usage_user_created", "usage", ["user_id", "created_at"])
+    op.create_index("idx_usage_apikey_created", "usage", ["api_key_id", "created_at"])
+    op.create_index("idx_usage_provider_model_created", "usage", ["provider", "model", "created_at"])
 
     # ==================== user_quotas ====================
     op.create_table(
```
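Column order matters in these composite B-tree indexes: the leading columns should be constrained by equality so the trailing `created_at` can still serve a range scan or sort. A sketch of the query shapes the three indexes presumably target (the SQL is illustrative; only the index definitions come from the diff):

```python
# Query shapes these composite indexes are presumably meant to serve.
from sqlalchemy import text

# idx_usage_user_created: equality on the leading column (user_id), then a
# range plus ORDER BY on the second (created_at) — one index scan, pre-sorted.
PER_USER_USAGE = text(
    "SELECT * FROM usage "
    "WHERE user_id = :uid AND created_at >= :since "
    "ORDER BY created_at"
)

# idx_usage_provider_model_created: equality on both leading columns keeps the
# trailing created_at range predicate servable by the same index.
MODEL_STATS = text(
    "SELECT count(*) FROM usage "
    "WHERE provider = :provider AND model = :model AND created_at >= :since"
)
```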
```diff
@@ -18,33 +18,35 @@ depends_on = None
 def upgrade() -> None:
     """Add composite indexes to the usage table to optimize common queries
 
-    The indexes are created with CONCURRENTLY to avoid locking the table,
-    but that must run in AUTOCOMMIT mode (it cannot run inside a transaction)
+    Note: these indexes are already created in the baseline migration.
+    This migration only applies when upgrading from an older version; fresh installs skip it.
     """
     conn = op.get_bind()
-    engine = conn.engine
 
-    # Use a new connection in AUTOCOMMIT mode to support CREATE INDEX CONCURRENTLY
-    with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as autocommit_conn:
-        # IF NOT EXISTS avoids duplicate creation, so no separate existence check is needed
-
-        # 1. user_id + created_at composite index (per-user usage queries)
-        autocommit_conn.execute(text(
-            "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_usage_user_created "
-            "ON usage (user_id, created_at)"
-        ))
-
-        # 2. api_key_id + created_at composite index (per-API-key usage queries)
-        autocommit_conn.execute(text(
-            "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_usage_apikey_created "
-            "ON usage (api_key_id, created_at)"
-        ))
-
-        # 3. provider + model + created_at composite index (model statistics queries)
-        autocommit_conn.execute(text(
-            "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_usage_provider_model_created "
-            "ON usage (provider, model, created_at)"
-        ))
+    # Check whether the usage table exists
+    result = conn.execute(text(
+        "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'usage')"
+    ))
+    if not result.scalar():
+        # Table does not exist; skip
+        return
+
+    # Indexes to create
+    indexes = [
+        ("idx_usage_user_created", "ON usage (user_id, created_at)"),
+        ("idx_usage_apikey_created", "ON usage (api_key_id, created_at)"),
+        ("idx_usage_provider_model_created", "ON usage (provider, model, created_at)"),
+    ]
+
+    # Check and create each index individually
+    for index_name, index_def in indexes:
+        result = conn.execute(text(
+            f"SELECT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = '{index_name}')"
+        ))
+        if result.scalar():
+            continue  # index already exists; skip
+
+        conn.execute(text(f"CREATE INDEX {index_name} {index_def}"))
 
 
 def downgrade() -> None:
```
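The rewrite trades `CREATE INDEX CONCURRENTLY` (which cannot run inside the transaction Alembic normally wraps around a migration) for a plain `CREATE INDEX` guarded by a `pg_indexes` lookup. On PostgreSQL 9.5+ the same guard fits in one idempotent statement; a minimal sketch, with a placeholder DSN rather than the project's actual configuration:

```python
# Sketch only: IF NOT EXISTS makes index creation idempotent without the
# separate pg_indexes existence query used in the migration above.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@localhost/db")  # placeholder DSN

with engine.begin() as conn:  # begin() commits on success
    conn.execute(text(
        "CREATE INDEX IF NOT EXISTS idx_usage_user_created "
        "ON usage (user_id, created_at)"
    ))
```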
```diff
@@ -26,10 +26,13 @@ calc_deps_hash() {
     cat pyproject.toml frontend/package.json frontend/package-lock.json Dockerfile.base.local 2>/dev/null | md5sum | cut -d' ' -f1
 }
 
-# Compute the hash of the code files
+# Compute the hash of the code files (includes Dockerfile.app.local)
 calc_code_hash() {
-    find src -type f -name "*.py" 2>/dev/null | sort | xargs cat 2>/dev/null | md5sum | cut -d' ' -f1
-    find frontend/src -type f \( -name "*.vue" -o -name "*.ts" -o -name "*.tsx" -o -name "*.js" \) 2>/dev/null | sort | xargs cat 2>/dev/null | md5sum | cut -d' ' -f1
+    {
+        cat Dockerfile.app.local 2>/dev/null
+        find src -type f -name "*.py" 2>/dev/null | sort | xargs cat 2>/dev/null
+        find frontend/src -type f \( -name "*.vue" -o -name "*.ts" -o -name "*.tsx" -o -name "*.js" \) 2>/dev/null | sort | xargs cat 2>/dev/null
+    } | md5sum | cut -d' ' -f1
 }
 
 # Compute the hash of the migration files
```
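The new `calc_code_hash` pipes one concatenated stream (Dockerfile plus sorted source files) into a single `md5sum`, where the old body emitted two separate hashes, and folding `Dockerfile.app.local` into the digest means image-definition edits now invalidate the cached build too. A rough Python equivalent of the new cache key, for illustration only (the paths mirror the script; the function name is made up):

```python
# Illustrative Python port of calc_code_hash; not part of the repository.
import hashlib
from pathlib import Path

def code_hash() -> str:
    h = hashlib.md5()
    # Dockerfile.app.local joins the hash so image-definition edits also
    # invalidate the cached build — the point of this diff.
    dockerfile = Path("Dockerfile.app.local")
    if dockerfile.exists():
        h.update(dockerfile.read_bytes())
    # Sorted traversal mirrors `find ... | sort` for a deterministic digest
    # (here sorted per extension, which is deterministic but not byte-identical
    # to the script's single sorted stream).
    for p in sorted(Path("src").rglob("*.py")):
        h.update(p.read_bytes())
    for ext in ("*.vue", "*.ts", "*.tsx", "*.js"):
        for p in sorted(Path("frontend/src").rglob(ext)):
            h.update(p.read_bytes())
    return h.hexdigest()
```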
```diff
@@ -208,86 +208,120 @@ class CleanupScheduler:
             return
 
         # Not the first run; check whether any recent dates are missing and need backfill
-        latest_stat = db.query(StatsDaily).order_by(StatsDaily.date.desc()).first()
-
-        if latest_stat:
-            latest_date_utc = latest_stat.date
-            if latest_date_utc.tzinfo is None:
-                latest_date_utc = latest_date_utc.replace(tzinfo=timezone.utc)
-            else:
-                latest_date_utc = latest_date_utc.astimezone(timezone.utc)
-
-            # Compute the missing range from business dates (avoids day shifts from UTC Y/M/D and is safer across DST)
-            latest_business_date = latest_date_utc.astimezone(app_tz).date()
-            yesterday_business_date = today_local.date() - timedelta(days=1)
-            missing_start_date = latest_business_date + timedelta(days=1)
-
-            if missing_start_date <= yesterday_business_date:
-                missing_days = (
-                    yesterday_business_date - missing_start_date
-                ).days + 1
-
-                # Cap the backfill window so a long outage does not backfill too much at once
-                max_backfill_days: int = SystemConfigService.get_config(
-                    db, "max_stats_backfill_days", 30
-                ) or 30
-                if missing_days > max_backfill_days:
-                    logger.warning(
-                        f"Missing {missing_days} days of data exceeds the maximum backfill limit of "
-                        f"{max_backfill_days} days; only backfilling the most recent {max_backfill_days} days"
-                    )
-                    missing_start_date = yesterday_business_date - timedelta(
-                        days=max_backfill_days - 1
-                    )
-                    missing_days = max_backfill_days
-
-                logger.info(
-                    f"Detected {missing_days} days of missing statistics "
-                    f"({missing_start_date} ~ {yesterday_business_date}); starting backfill..."
-                )
-
-                current_date = missing_start_date
-                users = (
-                    db.query(DBUser.id).filter(DBUser.is_active.is_(True)).all()
-                )
-
-                while current_date <= yesterday_business_date:
-                    try:
-                        current_date_local = datetime.combine(
-                            current_date, datetime.min.time(), tzinfo=app_tz
-                        )
-                        StatsAggregatorService.aggregate_daily_stats(
-                            db, current_date_local
-                        )
-                        StatsAggregatorService.aggregate_daily_model_stats(
-                            db, current_date_local
-                        )
-                        for (user_id,) in users:
-                            try:
-                                StatsAggregatorService.aggregate_user_daily_stats(
-                                    db, user_id, current_date_local
-                                )
-                            except Exception as e:
-                                logger.warning(
-                                    f"Backfill for user {user_id} on {current_date} failed: {e}"
-                                )
-                                try:
-                                    db.rollback()
-                                except Exception:
-                                    pass
-                    except Exception as e:
-                        logger.warning(f"Backfill for date {current_date} failed: {e}")
-                        try:
-                            db.rollback()
-                        except Exception:
-                            pass
-
-                    current_date += timedelta(days=1)
-
-                StatsAggregatorService.update_summary(db)
-                logger.info(f"Missing-data backfill complete, {missing_days} days in total")
-            else:
-                logger.info("Statistics are already up to date; no backfill needed")
+        from src.models.database import StatsDailyModel
+
+        yesterday_business_date = today_local.date() - timedelta(days=1)
+        max_backfill_days: int = SystemConfigService.get_config(
+            db, "max_stats_backfill_days", 30
+        ) or 30
+
+        # Compute the start date of the backfill check window
+        check_start_date = yesterday_business_date - timedelta(
+            days=max_backfill_days - 1
+        )
+
+        # Collect the sets of dates already present in StatsDaily and StatsDailyModel
+        existing_daily_dates = set()
+        existing_model_dates = set()
+
+        daily_stats = (
+            db.query(StatsDaily.date)
+            .filter(StatsDaily.date >= check_start_date.isoformat())
+            .all()
+        )
+        for (stat_date,) in daily_stats:
+            if stat_date.tzinfo is None:
+                stat_date = stat_date.replace(tzinfo=timezone.utc)
+            existing_daily_dates.add(stat_date.astimezone(app_tz).date())
+
+        model_stats = (
+            db.query(StatsDailyModel.date)
+            .filter(StatsDailyModel.date >= check_start_date.isoformat())
+            .distinct()
+            .all()
+        )
+        for (stat_date,) in model_stats:
+            if stat_date.tzinfo is None:
+                stat_date = stat_date.replace(tzinfo=timezone.utc)
+            existing_model_dates.add(stat_date.astimezone(app_tz).date())
+
+        # Determine which dates need backfill
+        all_dates = set()
+        current = check_start_date
+        while current <= yesterday_business_date:
+            all_dates.add(current)
+            current += timedelta(days=1)
+
+        # Dates missing from StatsDaily
+        missing_daily_dates = all_dates - existing_daily_dates
+        # Dates missing from StatsDailyModel
+        missing_model_dates = all_dates - existing_model_dates
+        # Union of all dates that need processing
+        dates_to_process = missing_daily_dates | missing_model_dates
+
+        if dates_to_process:
+            sorted_dates = sorted(dates_to_process)
+            logger.info(
+                f"Detected {len(dates_to_process)} days of statistics needing backfill "
+                f"(StatsDaily missing {len(missing_daily_dates)} days, "
+                f"StatsDailyModel missing {len(missing_model_dates)} days)"
+            )
+
+            users = (
+                db.query(DBUser.id).filter(DBUser.is_active.is_(True)).all()
+            )
+
+            failed_dates = 0
+            failed_users = 0
+
+            for current_date in sorted_dates:
+                try:
+                    current_date_local = datetime.combine(
+                        current_date, datetime.min.time(), tzinfo=app_tz
+                    )
+                    # Only aggregate the tables whose data is actually missing
+                    if current_date in missing_daily_dates:
+                        StatsAggregatorService.aggregate_daily_stats(
+                            db, current_date_local
+                        )
+                    if current_date in missing_model_dates:
+                        StatsAggregatorService.aggregate_daily_model_stats(
+                            db, current_date_local
+                        )
+                    # Per-user stats are backfilled whenever either table was missing
+                    for (user_id,) in users:
+                        try:
+                            StatsAggregatorService.aggregate_user_daily_stats(
+                                db, user_id, current_date_local
+                            )
+                        except Exception as e:
+                            failed_users += 1
+                            logger.warning(
+                                f"Backfill for user {user_id} on {current_date} failed: {e}"
+                            )
+                            try:
+                                db.rollback()
+                            except Exception as rollback_err:
+                                logger.error(f"Rollback failed: {rollback_err}")
+                except Exception as e:
+                    failed_dates += 1
+                    logger.warning(f"Backfill for date {current_date} failed: {e}")
+                    try:
+                        db.rollback()
+                    except Exception as rollback_err:
+                        logger.error(f"Rollback failed: {rollback_err}")
+
+            StatsAggregatorService.update_summary(db)
+
+            if failed_dates > 0 or failed_users > 0:
+                logger.warning(
+                    f"Backfill finished, processed {len(dates_to_process)} days, "
+                    f"failures: {failed_dates} days, {failed_users} user records"
+                )
+            else:
+                logger.info(f"Missing-data backfill complete, processed {len(dates_to_process)} days")
+        else:
+            logger.info("Statistics are already up to date; no backfill needed")
         return
 
         # Scheduled task: aggregate yesterday's data
```
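The heart of this change: instead of backfilling a contiguous range after the newest `StatsDaily` row, the new code computes a missing-date set per table and processes their union, so a gap that exists only in `StatsDailyModel` is no longer skipped, and each date re-aggregates only the tables it is actually missing from. A self-contained sketch of that set arithmetic, with made-up dates:

```python
# Made-up dates; demonstrates the per-table missing-set logic from the diff.
from datetime import date, timedelta

yesterday = date(2025, 1, 10)
window = 5  # stands in for max_stats_backfill_days

all_dates = {yesterday - timedelta(days=i) for i in range(window)}
existing_daily = {date(2025, 1, 10), date(2025, 1, 9)}  # StatsDaily already has these
existing_model = {date(2025, 1, 10)}                    # StatsDailyModel has only one

missing_daily = all_dates - existing_daily
missing_model = all_dates - existing_model
dates_to_process = missing_daily | missing_model  # union: backfill where either table has a gap

for d in sorted(dates_to_process):
    # Each date re-aggregates only the table(s) it is missing from.
    tables = [name for name, missing in [("daily", missing_daily), ("model", missing_model)]
              if d in missing]
    print(d, tables)
```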