2 Commits

Author SHA1 Message Date
fawney19
9dad194130 fix: 修复 API Key 访问限制字段无法清除的问题
- 统一前端创建和更新 API Key 时的空数组处理逻辑
- 后端创建和更新接口都支持空数组转 NULL(表示不限制)
- 开启自动刷新时立即刷新一次数据
2025-12-24 22:35:30 +08:00
fawney19
03ad16ea8a fix: 修复迁移脚本在全新安装时报错及改进统计回填逻辑
迁移脚本修复:
- 移除 AUTOCOMMIT 模式,改为在同一事务中创建索引
- 分别检查每个索引是否存在,只创建缺失的索引
- 修复全新安装时 AUTOCOMMIT 连接看不到未提交表的问题 (#46)

统计回填改进:
- 分别检查 StatsDaily 和 StatsDailyModel 的缺失日期
- 只回填实际缺失的数据而非连续区间
- 添加失败统计计数和 rollback 错误日志
2025-12-24 21:50:05 +08:00
5 changed files with 147 additions and 113 deletions

View File

@@ -18,49 +18,35 @@ depends_on = None
def upgrade() -> None: def upgrade() -> None:
"""为 usage 表添加复合索引以优化常见查询 """为 usage 表添加复合索引以优化常见查询
使用 CONCURRENTLY 创建索引以避免锁表, 注意:这些索引已经在 baseline 迁移中创建。
但需要在 AUTOCOMMIT 模式下执行(不能在事务内) 此迁移仅用于从旧版本升级的场景,新安装会跳过。
注意:如果是从全新数据库执行(baseline 刚创建表),
由于 AUTOCOMMIT 连接看不到事务中未提交的表,会跳过索引创建。
这种情况下索引会在下次迁移或手动创建。
""" """
conn = op.get_bind() conn = op.get_bind()
engine = conn.engine
# 使用新连接并设置 AUTOCOMMIT 模式以支持 CREATE INDEX CONCURRENTLY # 检查 usage 表是否存在
with engine.connect().execution_options(isolation_level="AUTOCOMMIT") as autocommit_conn: result = conn.execute(text(
# 检查 usage 表是否存在(在 AUTOCOMMIT 连接中可见)
# 如果表不存在(例如 baseline 迁移还在事务中),跳过索引创建
result = autocommit_conn.execute(text(
"SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'usage')" "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'usage')"
)) ))
table_exists = result.scalar() if not result.scalar():
# 表不存在,跳过
if not table_exists:
# 表在当前连接不可见(可能 baseline 还在事务中),跳过
# 索引将通过后续迁移或手动创建
return return
# 使用 IF NOT EXISTS 避免重复创建,无需单独检查索引是否存在 # 定义需要创建的索引
indexes = [
("idx_usage_user_created", "ON usage (user_id, created_at)"),
("idx_usage_apikey_created", "ON usage (api_key_id, created_at)"),
("idx_usage_provider_model_created", "ON usage (provider, model, created_at)"),
]
# 1. user_id + created_at 复合索引 (用户用量查询) # 分别检查并创建每个索引
autocommit_conn.execute(text( for index_name, index_def in indexes:
"CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_usage_user_created " result = conn.execute(text(
"ON usage (user_id, created_at)" f"SELECT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = '{index_name}')"
)) ))
if result.scalar():
continue # 索引已存在,跳过
# 2. api_key_id + created_at 复合索引 (API Key 用量查询) conn.execute(text(f"CREATE INDEX {index_name} {index_def}"))
autocommit_conn.execute(text(
"CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_usage_apikey_created "
"ON usage (api_key_id, created_at)"
))
# 3. provider + model + created_at 复合索引 (模型统计查询)
autocommit_conn.execute(text(
"CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_usage_provider_model_created "
"ON usage (provider, model, created_at)"
))
def downgrade() -> None: def downgrade() -> None:

View File

@@ -1046,9 +1046,10 @@ async function handleKeyFormSubmit(data: StandaloneKeyFormData) {
rate_limit: data.rate_limit, rate_limit: data.rate_limit,
expire_days: data.never_expire ? null : (data.expire_days || null), expire_days: data.never_expire ? null : (data.expire_days || null),
auto_delete_on_expiry: data.auto_delete_on_expiry, auto_delete_on_expiry: data.auto_delete_on_expiry,
allowed_providers: data.allowed_providers.length > 0 ? data.allowed_providers : undefined, // 空数组表示清除限制(允许全部),后端会将空数组存为 NULL
allowed_api_formats: data.allowed_api_formats.length > 0 ? data.allowed_api_formats : undefined, allowed_providers: data.allowed_providers,
allowed_models: data.allowed_models.length > 0 ? data.allowed_models : undefined allowed_api_formats: data.allowed_api_formats,
allowed_models: data.allowed_models
} }
await adminApi.updateApiKey(data.id, updateData) await adminApi.updateApiKey(data.id, updateData)
success('API Key 更新成功') success('API Key 更新成功')
@@ -1064,9 +1065,10 @@ async function handleKeyFormSubmit(data: StandaloneKeyFormData) {
rate_limit: data.rate_limit, rate_limit: data.rate_limit,
expire_days: data.never_expire ? null : (data.expire_days || null), expire_days: data.never_expire ? null : (data.expire_days || null),
auto_delete_on_expiry: data.auto_delete_on_expiry, auto_delete_on_expiry: data.auto_delete_on_expiry,
allowed_providers: data.allowed_providers.length > 0 ? data.allowed_providers : undefined, // 空数组表示不设置限制(允许全部),后端会将空数组存为 NULL
allowed_api_formats: data.allowed_api_formats.length > 0 ? data.allowed_api_formats : undefined, allowed_providers: data.allowed_providers,
allowed_models: data.allowed_models.length > 0 ? data.allowed_models : undefined allowed_api_formats: data.allowed_api_formats,
allowed_models: data.allowed_models
} }
const response = await adminApi.createStandaloneApiKey(createData) const response = await adminApi.createStandaloneApiKey(createData)
newKeyValue.value = response.key newKeyValue.value = response.key

View File

@@ -301,6 +301,7 @@ function stopGlobalAutoRefresh() {
function handleAutoRefreshChange(value: boolean) { function handleAutoRefreshChange(value: boolean) {
globalAutoRefresh.value = value globalAutoRefresh.value = value
if (value) { if (value) {
refreshData() // 立即刷新一次
startGlobalAutoRefresh() startGlobalAutoRefresh()
} else { } else {
stopGlobalAutoRefresh() stopGlobalAutoRefresh()

View File

@@ -208,84 +208,118 @@ class CleanupScheduler:
return return
# 非首次运行,检查最近是否有缺失的日期需要回填 # 非首次运行,检查最近是否有缺失的日期需要回填
latest_stat = db.query(StatsDaily).order_by(StatsDaily.date.desc()).first() from src.models.database import StatsDailyModel
if latest_stat:
latest_date_utc = latest_stat.date
if latest_date_utc.tzinfo is None:
latest_date_utc = latest_date_utc.replace(tzinfo=timezone.utc)
else:
latest_date_utc = latest_date_utc.astimezone(timezone.utc)
# 使用业务日期计算缺失区间(避免用 UTC 年月日导致日期偏移,且对 DST 更安全)
latest_business_date = latest_date_utc.astimezone(app_tz).date()
yesterday_business_date = today_local.date() - timedelta(days=1) yesterday_business_date = today_local.date() - timedelta(days=1)
missing_start_date = latest_business_date + timedelta(days=1)
if missing_start_date <= yesterday_business_date:
missing_days = (
yesterday_business_date - missing_start_date
).days + 1
# 限制最大回填天数,防止停机很久后一次性回填太多
max_backfill_days: int = SystemConfigService.get_config( max_backfill_days: int = SystemConfigService.get_config(
db, "max_stats_backfill_days", 30 db, "max_stats_backfill_days", 30
) or 30 ) or 30
if missing_days > max_backfill_days:
logger.warning( # 计算回填检查的起始日期
f"缺失 {missing_days} 天数据超过最大回填限制 " check_start_date = yesterday_business_date - timedelta(
f"{max_backfill_days} 天,只回填最近 {max_backfill_days}"
)
missing_start_date = yesterday_business_date - timedelta(
days=max_backfill_days - 1 days=max_backfill_days - 1
) )
missing_days = max_backfill_days
# 获取 StatsDaily 和 StatsDailyModel 中已有数据的日期集合
existing_daily_dates = set()
existing_model_dates = set()
daily_stats = (
db.query(StatsDaily.date)
.filter(StatsDaily.date >= check_start_date.isoformat())
.all()
)
for (stat_date,) in daily_stats:
if stat_date.tzinfo is None:
stat_date = stat_date.replace(tzinfo=timezone.utc)
existing_daily_dates.add(stat_date.astimezone(app_tz).date())
model_stats = (
db.query(StatsDailyModel.date)
.filter(StatsDailyModel.date >= check_start_date.isoformat())
.distinct()
.all()
)
for (stat_date,) in model_stats:
if stat_date.tzinfo is None:
stat_date = stat_date.replace(tzinfo=timezone.utc)
existing_model_dates.add(stat_date.astimezone(app_tz).date())
# 找出需要回填的日期
all_dates = set()
current = check_start_date
while current <= yesterday_business_date:
all_dates.add(current)
current += timedelta(days=1)
# 需要回填 StatsDaily 的日期
missing_daily_dates = all_dates - existing_daily_dates
# 需要回填 StatsDailyModel 的日期
missing_model_dates = all_dates - existing_model_dates
# 合并所有需要处理的日期
dates_to_process = missing_daily_dates | missing_model_dates
if dates_to_process:
sorted_dates = sorted(dates_to_process)
logger.info( logger.info(
f"检测到缺失 {missing_days} 天的统计数据 " f"检测到 {len(dates_to_process)} 天的统计数据需要回填 "
f"({missing_start_date} ~ {yesterday_business_date}),开始回填..." f"(StatsDaily 缺失 {len(missing_daily_dates)} 天, "
f"StatsDailyModel 缺失 {len(missing_model_dates)} 天)"
) )
current_date = missing_start_date
users = ( users = (
db.query(DBUser.id).filter(DBUser.is_active.is_(True)).all() db.query(DBUser.id).filter(DBUser.is_active.is_(True)).all()
) )
while current_date <= yesterday_business_date: failed_dates = 0
failed_users = 0
for current_date in sorted_dates:
try: try:
current_date_local = datetime.combine( current_date_local = datetime.combine(
current_date, datetime.min.time(), tzinfo=app_tz current_date, datetime.min.time(), tzinfo=app_tz
) )
# 只在缺失时才聚合对应的表
if current_date in missing_daily_dates:
StatsAggregatorService.aggregate_daily_stats( StatsAggregatorService.aggregate_daily_stats(
db, current_date_local db, current_date_local
) )
if current_date in missing_model_dates:
StatsAggregatorService.aggregate_daily_model_stats( StatsAggregatorService.aggregate_daily_model_stats(
db, current_date_local db, current_date_local
) )
# 用户统计在任一缺失时都回填
for (user_id,) in users: for (user_id,) in users:
try: try:
StatsAggregatorService.aggregate_user_daily_stats( StatsAggregatorService.aggregate_user_daily_stats(
db, user_id, current_date_local db, user_id, current_date_local
) )
except Exception as e: except Exception as e:
failed_users += 1
logger.warning( logger.warning(
f"回填用户 {user_id} 日期 {current_date} 失败: {e}" f"回填用户 {user_id} 日期 {current_date} 失败: {e}"
) )
try: try:
db.rollback() db.rollback()
except Exception: except Exception as rollback_err:
pass logger.error(f"回滚失败: {rollback_err}")
except Exception as e: except Exception as e:
failed_dates += 1
logger.warning(f"回填日期 {current_date} 失败: {e}") logger.warning(f"回填日期 {current_date} 失败: {e}")
try: try:
db.rollback() db.rollback()
except Exception: except Exception as rollback_err:
pass logger.error(f"回滚失败: {rollback_err}")
current_date += timedelta(days=1)
StatsAggregatorService.update_summary(db) StatsAggregatorService.update_summary(db)
logger.info(f"缺失数据回填完成,共 {missing_days}")
if failed_dates > 0 or failed_users > 0:
logger.warning(
f"回填完成,共处理 {len(dates_to_process)} 天,"
f"失败: {failed_dates} 天, {failed_users} 个用户记录"
)
else:
logger.info(f"缺失数据回填完成,共处理 {len(dates_to_process)}")
else: else:
logger.info("统计数据已是最新,无需回填") logger.info("统计数据已是最新,无需回填")
return return

View File

@@ -59,14 +59,15 @@ class ApiKeyService:
if expire_days: if expire_days:
expires_at = datetime.now(timezone.utc) + timedelta(days=expire_days) expires_at = datetime.now(timezone.utc) + timedelta(days=expire_days)
# 空数组转为 None表示不限制
api_key = ApiKey( api_key = ApiKey(
user_id=user_id, user_id=user_id,
key_hash=key_hash, key_hash=key_hash,
key_encrypted=key_encrypted, key_encrypted=key_encrypted,
name=name or f"API Key {datetime.now(timezone.utc).strftime('%Y%m%d%H%M%S')}", name=name or f"API Key {datetime.now(timezone.utc).strftime('%Y%m%d%H%M%S')}",
allowed_providers=allowed_providers, allowed_providers=allowed_providers or None,
allowed_api_formats=allowed_api_formats, allowed_api_formats=allowed_api_formats or None,
allowed_models=allowed_models, allowed_models=allowed_models or None,
rate_limit=rate_limit, rate_limit=rate_limit,
concurrent_limit=concurrent_limit, concurrent_limit=concurrent_limit,
expires_at=expires_at, expires_at=expires_at,
@@ -141,8 +142,18 @@ class ApiKeyService:
"auto_delete_on_expiry", "auto_delete_on_expiry",
] ]
# 允许显式设置为空数组/None 的字段(空数组会转为 None表示"全部"
nullable_list_fields = {"allowed_providers", "allowed_api_formats", "allowed_models"}
for field, value in kwargs.items(): for field, value in kwargs.items():
if field in updatable_fields and value is not None: if field not in updatable_fields:
continue
# 对于 nullable_list_fields空数组应该转为 None表示不限制
if field in nullable_list_fields:
if value is not None:
# 空数组转为 None表示允许全部
setattr(api_key, field, value if value else None)
elif value is not None:
setattr(api_key, field, value) setattr(api_key, field, value)
api_key.updated_at = datetime.now(timezone.utc) api_key.updated_at = datetime.now(timezone.utc)