first commit

This commit is contained in:
wsq
2026-05-13 21:58:19 +08:00
commit 0167c66cb7
1475 changed files with 233414 additions and 0 deletions
+140
View File
@@ -0,0 +1,140 @@
import { prisma } from '@/lib/prisma'
import { toMoneyNumber } from '@/lib/billing/money'
// Counters reported at the end of an --apply run.
type CleanupStats = {
  scanned: number // stale pending freezes fetched for processing
  stale: number // same as scanned (the query only returns stale rows)
  rolledBack: number // freezes successfully rolled back
  skipped: number // freezes skipped (already resolved, or user balance missing)
  errors: number // freezes whose rollback transaction threw
}
/** True when the CLI was invoked with `--apply` (mutating mode). */
function hasApplyFlag() {
  const args = process.argv
  return args.indexOf('--apply') !== -1
}
/**
 * Parses `--hours=N` from argv. Falls back to `defaultHours` when the flag is
 * absent or its value is not a finite positive number; fractional values are
 * truncated toward zero.
 */
function parseHoursArg(defaultHours: number) {
  const prefix = '--hours='
  const flag = process.argv.find((candidate) => candidate.startsWith(prefix))
  if (flag === undefined) return defaultHours
  const parsed = Number(flag.slice(prefix.length))
  const valid = Number.isFinite(parsed) && parsed > 0
  return valid ? Math.floor(parsed) : defaultHours
}
/** Pretty-prints a payload as JSON to stdout, newline-terminated. */
function writeJson(payload: unknown) {
  const serialized = JSON.stringify(payload, null, 2)
  process.stdout.write(serialized + '\n')
}
/** Writes a payload to stderr; strings pass through verbatim, everything else is pretty-printed JSON. */
function writeError(payload: unknown) {
  const text = typeof payload === 'string' ? payload : JSON.stringify(payload, null, 2)
  process.stderr.write(text + '\n')
}
/**
 * Finds balance freezes stuck in `pending` older than the cutoff and, in
 * --apply mode, rolls each one back inside a transaction: the still-frozen
 * portion is returned to the spendable balance and the freeze is marked
 * `rolled_back`. Without --apply it only prints a dry-run report.
 */
async function main() {
  const apply = hasApplyFlag()
  const hours = parseHoursArg(24)
  const cutoff = new Date(Date.now() - hours * 60 * 60 * 1000)
  const pending = await prisma.balanceFreeze.findMany({
    where: {
      status: 'pending',
      createdAt: { lt: cutoff },
    },
    orderBy: { createdAt: 'asc' },
  })
  const stats: CleanupStats = {
    scanned: pending.length,
    stale: pending.length,
    rolledBack: 0,
    skipped: 0,
    errors: 0,
  }
  if (!apply) {
    writeJson({
      mode: 'dry-run',
      hours,
      cutoff: cutoff.toISOString(),
      stalePendingCount: pending.length,
      stalePending: pending.map((f) => ({
        id: f.id,
        userId: f.userId,
        amount: toMoneyNumber(f.amount),
        createdAt: f.createdAt.toISOString(),
      })),
    })
    return
  }
  for (const freeze of pending) {
    try {
      // The transaction reports its outcome so we only count a rollback when
      // one actually happened. (Bug fix: previously `rolledBack` was
      // incremented after every successful transaction, double-counting
      // freezes that the transaction had already counted as skipped.)
      const outcome = await prisma.$transaction(async (tx) => {
        // Re-read under the transaction: status may have changed since findMany.
        const current = await tx.balanceFreeze.findUnique({
          where: { id: freeze.id },
        })
        if (!current || current.status !== 'pending') {
          return 'skipped' as const
        }
        const balance = await tx.userBalance.findUnique({
          where: { userId: current.userId },
        })
        if (!balance) {
          return 'skipped' as const
        }
        // Never release more than is actually frozen (guards against drift).
        const frozenAmount = toMoneyNumber(balance.frozenAmount)
        const freezeAmount = toMoneyNumber(current.amount)
        const nextFrozenAmount = Math.max(0, frozenAmount - freezeAmount)
        const frozenDelta = frozenAmount - nextFrozenAmount
        await tx.userBalance.update({
          where: { userId: current.userId },
          data: {
            balance: { increment: frozenDelta },
            frozenAmount: { decrement: frozenDelta },
          },
        })
        await tx.balanceFreeze.update({
          where: { id: current.id },
          data: {
            status: 'rolled_back',
          },
        })
        return 'rolled_back' as const
      })
      if (outcome === 'rolled_back') {
        stats.rolledBack += 1
      } else {
        stats.skipped += 1
      }
    } catch (error) {
      stats.errors += 1
      writeError({
        tag: 'billing-cleanup-pending-freezes.rollback_failed',
        freezeId: freeze.id,
        userId: freeze.userId,
        amount: toMoneyNumber(freeze.amount),
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }
  writeJson({
    mode: 'apply',
    hours,
    cutoff: cutoff.toISOString(),
    stats,
  })
}
// Entry point. Use `process.exitCode` instead of `process.exit()` so the
// `.finally()` handler still runs and Prisma disconnects before the process
// exits (process.exit terminates immediately, skipping the cleanup below).
main()
  .catch((error) => {
    writeError({
      tag: 'billing-cleanup-pending-freezes.fatal',
      error: error instanceof Error ? error.message : String(error),
    })
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+125
View File
@@ -0,0 +1,125 @@
import { prisma } from '@/lib/prisma'
import { roundMoney, toMoneyNumber } from '@/lib/billing/money'
// Per-user reconciliation row: compares the materialized balance
// (balance + frozenAmount) against the net sum of balance transactions.
type UserLedgerRow = {
  userId: string
  balance: number // spendable balance from UserBalance
  frozenAmount: number // currently frozen amount from UserBalance
  txNetAmount: number // net sum of all BalanceTransaction.amount for the user
  ledgerAmount: number // balance + frozenAmount
  diff: number // ledgerAmount - txNetAmount; non-zero indicates drift
}
/** True when `--strict` was passed: findings then fail the process. */
function hasStrictFlag() {
  return process.argv.indexOf('--strict') >= 0
}
/** Pretty-prints a payload as JSON to stdout. */
function write(payload: unknown) {
  const text = JSON.stringify(payload, null, 2)
  process.stdout.write(`${text}\n`)
}
/**
 * Audits the billing ledger.
 * For each user the materialized ledger (balance + frozenAmount) is compared
 * against the net sum of that user's balance transactions; a non-zero diff
 * means the two bookkeeping paths have drifted apart. Separately, `pending`
 * freezes whose task is missing or no longer active are flagged as orphans.
 * With --strict, any finding sets a non-zero exit code (for CI).
 */
async function main() {
  const strict = hasStrictFlag()
  // The three reads are independent — run them concurrently.
  const [balances, txByUser, pendingFreezes] = await Promise.all([
    prisma.userBalance.findMany({
      select: {
        userId: true,
        balance: true,
        frozenAmount: true,
      },
    }),
    prisma.balanceTransaction.groupBy({
      by: ['userId'],
      _sum: { amount: true },
    }),
    prisma.balanceFreeze.findMany({
      where: { status: 'pending' },
      select: {
        id: true,
        userId: true,
        taskId: true,
        amount: true,
        createdAt: true,
      },
      orderBy: { createdAt: 'asc' },
    }),
  ])
  // Index transaction sums by user; amounts are rounded to 8 decimal places
  // to keep float noise out of the comparison.
  const txNetByUser = new Map<string, number>()
  for (const row of txByUser) {
    txNetByUser.set(row.userId, roundMoney(toMoneyNumber(row._sum.amount), 8))
  }
  const ledgerRows: UserLedgerRow[] = balances.map((row) => {
    const balance = toMoneyNumber(row.balance)
    const frozenAmount = toMoneyNumber(row.frozenAmount)
    // Users with no transactions default to a net of 0.
    const txNetAmount = roundMoney(txNetByUser.get(row.userId) || 0, 8)
    const ledgerAmount = roundMoney(balance + frozenAmount, 8)
    return {
      userId: row.userId,
      balance,
      frozenAmount,
      txNetAmount,
      ledgerAmount,
      diff: roundMoney(ledgerAmount - txNetAmount, 8),
    }
  })
  // Tolerance matches the 8-decimal rounding above.
  const nonZeroDiffUsers = ledgerRows.filter((row) => Math.abs(row.diff) > 1e-8)
  const pendingTaskIds = pendingFreezes
    .map((row) => row.taskId)
    .filter((taskId): taskId is string => typeof taskId === 'string' && taskId.length > 0)
  const tasks = pendingTaskIds.length > 0
    ? await prisma.task.findMany({
        where: { id: { in: pendingTaskIds } },
        select: { id: true, status: true },
      })
    : []
  const taskStatusById = new Map(tasks.map((row) => [row.id, row.status]))
  // A pending freeze is legitimate only while its task is still in-flight.
  const activeStatuses = new Set(['queued', 'processing'])
  const orphanPendingFreezes = pendingFreezes.filter((freeze) => {
    if (!freeze.taskId) return true // freeze without a task is always orphaned
    const status = taskStatusById.get(freeze.taskId)
    if (!status) return true // task row no longer exists
    return !activeStatuses.has(status)
  })
  const result = {
    strict,
    checkedAt: new Date().toISOString(),
    totals: {
      users: balances.length,
      txUsers: txByUser.length,
      pendingFreezes: pendingFreezes.length,
      nonZeroDiffUsers: nonZeroDiffUsers.length,
      orphanPendingFreezes: orphanPendingFreezes.length,
    },
    nonZeroDiffUsers,
    orphanPendingFreezes: orphanPendingFreezes.map((row) => ({
      id: row.id,
      userId: row.userId,
      taskId: row.taskId,
      amount: toMoneyNumber(row.amount),
      createdAt: row.createdAt.toISOString(),
    })),
  }
  write(result)
  // exitCode (not process.exit) lets the .finally() disconnect still run.
  if (strict && (nonZeroDiffUsers.length > 0 || orphanPendingFreezes.length > 0)) {
    process.exitCode = 1
  }
}
// Entry point: report errors as JSON and always release the Prisma connection.
main()
  .catch((error) => {
    write({
      error: error instanceof Error ? error.message : String(error),
    })
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+105
View File
@@ -0,0 +1,105 @@
import { createScopedLogger } from '@/lib/logging/core'
import express, { type NextFunction, type Request, type Response } from 'express'
import { createBullBoard } from '@bull-board/api'
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter'
import { ExpressAdapter } from '@bull-board/express'
import { imageQueue, textQueue, videoQueue, voiceQueue } from '@/lib/task/queues'
// Bind to loopback by default so the board is not exposed publicly unless
// explicitly configured via BULL_BOARD_HOST.
const host = process.env.BULL_BOARD_HOST || '127.0.0.1'
const port = Number.parseInt(process.env.BULL_BOARD_PORT || '3010', 10) || 3010
const basePath = process.env.BULL_BOARD_BASE_PATH || '/admin/queues'
// When BOTH are unset, basic auth is disabled (see basicAuthMiddleware).
const authUser = process.env.BULL_BOARD_USER
const authPassword = process.env.BULL_BOARD_PASSWORD
const logger = createScopedLogger({
  module: 'ops.bull_board',
})
function unauthorized(res: Response) {
res.setHeader('WWW-Authenticate', 'Basic realm="BullMQ Board"')
res.status(401).send('Authentication required')
}
/**
 * HTTP Basic auth gate for the board.
 * Auth is enforced only when at least one of BULL_BOARD_USER /
 * BULL_BOARD_PASSWORD is set; with both unset the board is open (relies on
 * the loopback-only default host above).
 * NOTE(review): the credential comparison uses plain `!==`, which is not
 * timing-safe — consider crypto.timingSafeEqual if this is ever exposed
 * beyond loopback.
 */
function basicAuthMiddleware(req: Request, res: Response, next: NextFunction) {
  if (!authUser && !authPassword) {
    next()
    return
  }
  const authorization = req.headers.authorization
  if (!authorization?.startsWith('Basic ')) {
    unauthorized(res)
    return
  }
  // "Basic " prefix is 6 chars; the remainder is base64("user:pass").
  const encoded = authorization.slice(6).trim()
  let decoded = ''
  try {
    decoded = Buffer.from(encoded, 'base64').toString('utf8')
  } catch {
    unauthorized(res)
    return
  }
  // Split on the FIRST colon only: passwords may legally contain colons.
  const index = decoded.indexOf(':')
  if (index === -1) {
    unauthorized(res)
    return
  }
  const username = decoded.slice(0, index)
  const password = decoded.slice(index + 1)
  if (username !== (authUser || '') || password !== (authPassword || '')) {
    unauthorized(res)
    return
  }
  next()
}
// Wire the four BullMQ task queues into a single bull-board dashboard served
// by an Express adapter under `basePath`, gated by the auth middleware.
const serverAdapter = new ExpressAdapter()
serverAdapter.setBasePath(basePath)
createBullBoard({
  queues: [
    new BullMQAdapter(imageQueue),
    new BullMQAdapter(videoQueue),
    new BullMQAdapter(voiceQueue),
    new BullMQAdapter(textQueue),
  ],
  serverAdapter,
})
const app = express()
app.disable('x-powered-by') // do not advertise the server stack
app.use(basePath, basicAuthMiddleware, serverAdapter.getRouter())
// Start listening and log the effective configuration — only whether auth is
// enabled, never the credentials themselves.
const server = app.listen(port, host, () => {
  const secured = authUser || authPassword ? 'enabled' : 'disabled'
  logger.info({
    action: 'bull_board.started',
    message: 'bull board listening',
    details: {
      host,
      port,
      basePath,
      auth: secured,
    },
  })
})
/**
 * Graceful shutdown: close queue connections first, then stop the HTTP
 * server, then exit. allSettled ensures one failing queue close does not
 * abort the others.
 */
async function shutdown(signal: string) {
  logger.info({
    action: 'bull_board.shutdown',
    message: 'bull board shutting down',
    details: {
      signal,
    },
  })
  await Promise.allSettled([imageQueue.close(), videoQueue.close(), voiceQueue.close(), textQueue.close()])
  await new Promise<void>((resolve) => server.close(() => resolve()))
  process.exit(0)
}
// `void` marks the intentionally-unawaited promise.
process.on('SIGINT', () => void shutdown('SIGINT'))
process.on('SIGTERM', () => void shutdown('SIGTERM'))
+38
View File
@@ -0,0 +1,38 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { execSync } from 'node:child_process'
// Route files intentionally exempt from the apiHandler wrapper
// (framework-managed or special-purpose endpoints).
const ALLOWLIST = new Set([
  'src/app/api/auth/[...nextauth]/route.ts',
  'src/app/api/files/[...path]/route.ts',
  'src/app/api/system/boot-id/route.ts',
])
// Scans every API route file and fails the build when a non-allowlisted route
// does not reference `apiHandler`. Relies on `rg` (ripgrep) being on PATH.
function main() {
  const output = execSync("rg --files src/app/api | rg 'route\\.ts$'", { encoding: 'utf8' })
  const files = output
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean)
  const missing: string[] = []
  for (const file of files) {
    if (ALLOWLIST.has(file)) continue
    // `|| true` keeps execSync from throwing when rg finds no match (exit 1).
    // NOTE(review): the path is interpolated inside shell double quotes via
    // JSON.stringify, so `$`/backtick expansion remains possible for exotic
    // file names — confirm repo paths never contain shell metacharacters.
    const hasApiHandler = execSync(`rg -n \"apiHandler\" ${JSON.stringify(file)} || true`, { encoding: 'utf8' }).trim().length > 0
    if (!hasApiHandler) {
      missing.push(file)
    }
  }
  if (missing.length > 0) {
    _ulogError('[check-api-handler] missing apiHandler in:')
    for (const file of missing) {
      _ulogError(`- ${file}`)
    }
    process.exit(1)
  }
  _ulogInfo(`[check-api-handler] ok total=${files.length} allowlist=${ALLOWLIST.size}`)
}
main()
+334
View File
@@ -0,0 +1,334 @@
import { promises as fs } from 'node:fs'
import path from 'node:path'
// Directory containing capability catalog files (each a JSON array of entries).
const CATALOG_DIR = path.resolve(process.cwd(), 'standards/capabilities')
// All namespaces a `capabilities` object may legally contain.
const CAPABILITY_NAMESPACES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
// Per-namespace whitelist of fields; anything else is reported as unknown.
const CAPABILITY_NAMESPACE_ALLOWED_FIELDS = {
  llm: new Set(['reasoningEffortOptions', 'fieldI18n']),
  image: new Set(['resolutionOptions', 'fieldI18n']),
  video: new Set([
    'generationModeOptions',
    'generateAudioOptions',
    'durationOptions',
    'fpsOptions',
    'resolutionOptions',
    'firstlastframe',
    'supportGenerateAudio',
    'fieldI18n',
  ]),
  audio: new Set(['voiceOptions', 'rateOptions', 'fieldI18n']),
  lipsync: new Set(['modeOptions', 'fieldI18n']),
}
// Maps each translatable field to the option-list field it annotates, so
// optionLabelKeys can be validated against the declared option values.
const CAPABILITY_NAMESPACE_I18N_FIELDS = {
  llm: { reasoningEffort: 'reasoningEffortOptions' },
  image: { resolution: 'resolutionOptions' },
  video: {
    generationMode: 'generationModeOptions',
    generateAudio: 'generateAudioOptions',
    duration: 'durationOptions',
    fps: 'fpsOptions',
    resolution: 'resolutionOptions',
  },
  audio: { voice: 'voiceOptions', rate: 'rateOptions' },
  lipsync: { mode: 'modeOptions' },
}
// Legal values for an entry's modelType.
const MODEL_TYPES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
// A "record" is a plain (non-array) object; null/undefined are excluded.
function isRecord(value) {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && Array.isArray(value) === false
}
// True only for strings with visible (non-whitespace) content.
function isNonEmptyString(value) {
  if (typeof value !== 'string') return false
  return value.trim() !== ''
}
// An i18n key is a non-empty string containing at least one dot separator
// (e.g. "models.video.duration").
function isI18nKey(value) {
  return isNonEmptyString(value) ? value.includes('.') : false
}
// True when value is an array whose items are all non-empty strings.
function isStringArray(value) {
  if (!Array.isArray(value)) return false
  return value.every(isNonEmptyString)
}
// True when value is an array of finite numbers (NaN/Infinity rejected).
function isNumberArray(value) {
  if (!Array.isArray(value)) return false
  for (const item of value) {
    if (typeof item !== 'number' || !Number.isFinite(item)) return false
  }
  return true
}
// True when value is an array containing only booleans.
function isBooleanArray(value) {
  return Array.isArray(value) ? value.every((item) => typeof item === 'boolean') : false
}
// Splits "provider::modelId" at the FIRST "::" and returns the parts plus a
// normalized key; returns null when the marker is missing or either side is empty.
function parseModelKeyStrict(value) {
  if (!isNonEmptyString(value)) return null
  const trimmed = value.trim()
  const separator = trimmed.indexOf('::')
  if (separator < 0) return null
  const provider = trimmed.slice(0, separator).trim()
  const modelId = trimmed.slice(separator + 2).trim()
  if (!provider || !modelId) return null
  return { provider, modelId, modelKey: `${provider}::${modelId}` }
}
// Appends a structured validation issue for one catalog entry field.
function pushIssue(issues, file, index, field, message) {
  const issue = { file, index, field, message }
  issues.push(issue)
}
// Flags any key in a capability namespace object that is not explicitly
// allowed for that namespace; the legacy `i18n` key gets a migration hint.
function validateAllowedFields(issues, file, index, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  const allowedFields = CAPABILITY_NAMESPACE_ALLOWED_FIELDS[namespace]
  for (const field of Object.keys(namespaceValue)) {
    if (allowedFields.has(field)) continue
    const message = field === 'i18n'
      ? 'use fieldI18n instead of i18n'
      : `unknown capability field: ${field}`
    pushIssue(issues, file, index, `capabilities.${namespace}.${field}`, message)
  }
}
// Validates a namespace's optional `fieldI18n` map: each entry must target a
// known translatable field, labelKey/unitKey must be dotted i18n keys, and
// every optionLabelKeys key must correspond to a declared option value.
function validateFieldI18nMap(issues, file, index, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  if (namespaceValue.fieldI18n === undefined) return // fieldI18n is optional
  if (!isRecord(namespaceValue.fieldI18n)) {
    pushIssue(issues, file, index, `capabilities.${namespace}.fieldI18n`, 'fieldI18n must be an object')
    return
  }
  const allowedI18nFields = CAPABILITY_NAMESPACE_I18N_FIELDS[namespace]
  for (const [fieldName, fieldConfig] of Object.entries(namespaceValue.fieldI18n)) {
    if (!(fieldName in allowedI18nFields)) {
      pushIssue(issues, file, index, `capabilities.${namespace}.fieldI18n.${fieldName}`, `unknown i18n field: ${fieldName}`)
      continue
    }
    if (!isRecord(fieldConfig)) {
      pushIssue(issues, file, index, `capabilities.${namespace}.fieldI18n.${fieldName}`, 'field i18n config must be an object')
      continue
    }
    if (fieldConfig.labelKey !== undefined && !isI18nKey(fieldConfig.labelKey)) {
      pushIssue(issues, file, index, `capabilities.${namespace}.fieldI18n.${fieldName}.labelKey`, 'labelKey must be an i18n key')
    }
    if (fieldConfig.unitKey !== undefined && !isI18nKey(fieldConfig.unitKey)) {
      pushIssue(issues, file, index, `capabilities.${namespace}.fieldI18n.${fieldName}.unitKey`, 'unitKey must be an i18n key')
    }
    if (fieldConfig.optionLabelKeys !== undefined) {
      if (!isRecord(fieldConfig.optionLabelKeys)) {
        pushIssue(
          issues,
          file,
          index,
          `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys`,
          'optionLabelKeys must be an object',
        )
        continue
      }
      // Cross-check option labels against the declared option values,
      // stringified because option lists may hold numbers or booleans.
      const optionFieldName = allowedI18nFields[fieldName]
      const optionsRaw = namespaceValue[optionFieldName]
      const allowedOptions = Array.isArray(optionsRaw) ? new Set(optionsRaw.map((value) => String(value))) : null
      for (const [optionValue, optionLabel] of Object.entries(fieldConfig.optionLabelKeys)) {
        if (!isI18nKey(optionLabel)) {
          pushIssue(
            issues,
            file,
            index,
            `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys.${optionValue}`,
            'option label must be an i18n key',
          )
        }
        if (allowedOptions && !allowedOptions.has(optionValue)) {
          pushIssue(
            issues,
            file,
            index,
            `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys.${optionValue}`,
            `option ${optionValue} is not defined in ${optionFieldName}`,
          )
        }
      }
    }
  }
}
// Validates one entry's `capabilities` object against its modelType: only the
// namespace matching the modelType is allowed, and each namespace's option
// fields must have the correct element types.
function validateCapabilitiesForModelType(issues, file, index, modelType, capabilities) {
  if (capabilities === undefined || capabilities === null) return // capabilities are optional
  if (!isRecord(capabilities)) {
    pushIssue(issues, file, index, 'capabilities', 'capabilities must be an object')
    return
  }
  // A model may only declare the namespace that matches its own type.
  const expectedNamespace = modelType
  for (const namespace of Object.keys(capabilities)) {
    if (!CAPABILITY_NAMESPACES.has(namespace)) {
      pushIssue(issues, file, index, `capabilities.${namespace}`, `unknown capabilities namespace: ${namespace}`)
      continue
    }
    if (namespace !== expectedNamespace) {
      pushIssue(
        issues,
        file,
        index,
        `capabilities.${namespace}`,
        `namespace ${namespace} is not allowed for model type ${modelType}`,
      )
    }
  }
  // Per-namespace field checks run even when the namespace was flagged above,
  // so all issues for an entry are reported in a single pass.
  const llm = capabilities.llm
  if (llm !== undefined) {
    if (!isRecord(llm)) {
      pushIssue(issues, file, index, 'capabilities.llm', 'llm capabilities must be an object')
    } else {
      validateAllowedFields(issues, file, index, 'llm', llm)
      if (llm.reasoningEffortOptions !== undefined && !isStringArray(llm.reasoningEffortOptions)) {
        pushIssue(issues, file, index, 'capabilities.llm.reasoningEffortOptions', 'must be string array')
      }
      validateFieldI18nMap(issues, file, index, 'llm', llm)
    }
  }
  const image = capabilities.image
  if (image !== undefined) {
    if (!isRecord(image)) {
      pushIssue(issues, file, index, 'capabilities.image', 'image capabilities must be an object')
    } else {
      validateAllowedFields(issues, file, index, 'image', image)
      if (image.resolutionOptions !== undefined && !isStringArray(image.resolutionOptions)) {
        pushIssue(issues, file, index, 'capabilities.image.resolutionOptions', 'must be string array')
      }
      validateFieldI18nMap(issues, file, index, 'image', image)
    }
  }
  // Video has the richest option set: string, boolean and number arrays plus
  // two scalar booleans.
  const video = capabilities.video
  if (video !== undefined) {
    if (!isRecord(video)) {
      pushIssue(issues, file, index, 'capabilities.video', 'video capabilities must be an object')
    } else {
      validateAllowedFields(issues, file, index, 'video', video)
      if (video.generationModeOptions !== undefined && !isStringArray(video.generationModeOptions)) {
        pushIssue(issues, file, index, 'capabilities.video.generationModeOptions', 'must be string array')
      }
      if (video.generateAudioOptions !== undefined && !isBooleanArray(video.generateAudioOptions)) {
        pushIssue(issues, file, index, 'capabilities.video.generateAudioOptions', 'must be boolean array')
      }
      if (video.durationOptions !== undefined && !isNumberArray(video.durationOptions)) {
        pushIssue(issues, file, index, 'capabilities.video.durationOptions', 'must be number array')
      }
      if (video.fpsOptions !== undefined && !isNumberArray(video.fpsOptions)) {
        pushIssue(issues, file, index, 'capabilities.video.fpsOptions', 'must be number array')
      }
      if (video.resolutionOptions !== undefined && !isStringArray(video.resolutionOptions)) {
        pushIssue(issues, file, index, 'capabilities.video.resolutionOptions', 'must be string array')
      }
      if (video.supportGenerateAudio !== undefined && typeof video.supportGenerateAudio !== 'boolean') {
        pushIssue(issues, file, index, 'capabilities.video.supportGenerateAudio', 'must be boolean')
      }
      if (video.firstlastframe !== undefined && typeof video.firstlastframe !== 'boolean') {
        pushIssue(issues, file, index, 'capabilities.video.firstlastframe', 'must be boolean')
      }
      validateFieldI18nMap(issues, file, index, 'video', video)
    }
  }
  const audio = capabilities.audio
  if (audio !== undefined) {
    if (!isRecord(audio)) {
      pushIssue(issues, file, index, 'capabilities.audio', 'audio capabilities must be an object')
    } else {
      validateAllowedFields(issues, file, index, 'audio', audio)
      if (audio.voiceOptions !== undefined && !isStringArray(audio.voiceOptions)) {
        pushIssue(issues, file, index, 'capabilities.audio.voiceOptions', 'must be string array')
      }
      if (audio.rateOptions !== undefined && !isStringArray(audio.rateOptions)) {
        pushIssue(issues, file, index, 'capabilities.audio.rateOptions', 'must be string array')
      }
      validateFieldI18nMap(issues, file, index, 'audio', audio)
    }
  }
  const lipsync = capabilities.lipsync
  if (lipsync !== undefined) {
    if (!isRecord(lipsync)) {
      pushIssue(issues, file, index, 'capabilities.lipsync', 'lipsync capabilities must be an object')
    } else {
      validateAllowedFields(issues, file, index, 'lipsync', lipsync)
      if (lipsync.modeOptions !== undefined && !isStringArray(lipsync.modeOptions)) {
        pushIssue(issues, file, index, 'capabilities.lipsync.modeOptions', 'must be string array')
      }
      validateFieldI18nMap(issues, file, index, 'lipsync', lipsync)
    }
  }
}
// Lists absolute paths of all .json catalog files directly under CATALOG_DIR.
async function listCatalogFiles() {
  const entries = await fs.readdir(CATALOG_DIR, { withFileTypes: true })
  const jsonFiles = []
  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith('.json')) {
      jsonFiles.push(path.join(CATALOG_DIR, entry.name))
    }
  }
  return jsonFiles
}
// Reads and parses one catalog file; the top-level JSON value must be an array.
async function readCatalog(filePath) {
  const raw = await fs.readFile(filePath, 'utf8')
  const parsed = JSON.parse(raw)
  if (Array.isArray(parsed)) return parsed
  throw new Error(`catalog must be an array: ${filePath}`)
}
// Validates every catalog file and prints issues (capped at 50 lines so a
// badly broken catalog does not flood CI logs).
async function main() {
  const issues = []
  const files = await listCatalogFiles()
  if (files.length === 0) {
    throw new Error(`no catalog files found in ${CATALOG_DIR}`)
  }
  for (const filePath of files) {
    const catalogItems = await readCatalog(filePath)
    for (let index = 0; index < catalogItems.length; index += 1) {
      const item = catalogItems[index]
      if (!isRecord(item)) {
        pushIssue(issues, filePath, index, 'entry', 'entry must be an object')
        continue
      }
      // modelType gates everything else; skip the remaining checks when invalid.
      if (!isNonEmptyString(item.modelType) || !MODEL_TYPES.has(item.modelType)) {
        pushIssue(issues, filePath, index, 'modelType', 'modelType must be llm/image/video/audio/lipsync')
        continue
      }
      if (!isNonEmptyString(item.provider)) {
        pushIssue(issues, filePath, index, 'provider', 'provider must be a non-empty string')
      }
      if (!isNonEmptyString(item.modelId)) {
        pushIssue(issues, filePath, index, 'modelId', 'modelId must be a non-empty string')
      }
      // Re-compose and strictly re-parse so provider/modelId always form a
      // valid "provider::modelId" key.
      const modelKey = `${item.provider || ''}::${item.modelId || ''}`
      if (!parseModelKeyStrict(modelKey)) {
        pushIssue(issues, filePath, index, 'modelKey', 'provider/modelId must compose a valid provider::modelId')
      }
      validateCapabilitiesForModelType(issues, filePath, index, item.modelType, item.capabilities)
    }
  }
  if (issues.length === 0) {
    process.stdout.write(`[check-capability-catalog] OK (${files.length} files)\n`)
    return
  }
  const maxPrint = 50
  for (const issue of issues.slice(0, maxPrint)) {
    process.stdout.write(`[check-capability-catalog] ${issue.file}#${issue.index} ${issue.field}: ${issue.message}\n`)
  }
  if (issues.length > maxPrint) {
    process.stdout.write(`[check-capability-catalog] ... ${issues.length - maxPrint} more issues\n`)
  }
  process.exitCode = 1
}
// Entry point: report failure on stderr without masking the exit code.
main().catch((error) => {
  process.stderr.write(`[check-capability-catalog] failed: ${String(error)}\n`)
  process.exitCode = 1
})
+118
View File
@@ -0,0 +1,118 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { decodeImageUrlsFromDb } from '@/lib/contracts/image-urls-contract'
// Shape of the columns audited on each appearance row.
type AppearanceRow = {
  id: string
  imageUrls: string | null
  previousImageUrls: string | null
}
// Minimal Prisma-delegate surface used below, so models can be looked up by name.
type DynamicModel = {
  findMany: (args: unknown) => Promise<AppearanceRow[]>
}
// Rows fetched per page during cursor pagination.
const BATCH_SIZE = 500
// Tables whose imageUrls columns must satisfy the contract.
const MODELS: Array<{ name: string; model: string }> = [
  { name: 'CharacterAppearance', model: 'characterAppearance' },
  { name: 'GlobalCharacterAppearance', model: 'globalCharacterAppearance' },
]
// Indexable view of the Prisma client for dynamic model access.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
/** Writes one line to stdout. */
function print(message: string) {
  process.stdout.write(`${message}\n`)
}
/**
 * Cursor-paginates through every row of one appearance table and attempts to
 * decode both imageUrls columns with the contract decoder. Returns scan and
 * violation counts; up to 20 offending rows are captured as samples.
 */
async function checkModel(modelName: string, modelKey: string) {
  const model = prismaDynamic[modelKey]
  if (!model) {
    throw new Error(`Prisma model not found: ${modelKey}`)
  }
  let scanned = 0
  let violations = 0
  const samples: Array<{ id: string; field: 'imageUrls' | 'previousImageUrls'; message: string; value: string | null }> = []
  let cursor: string | null = null
  while (true) {
    // Keyset pagination on id: skip the cursor row itself on follow-up pages.
    const rows = await model.findMany({
      select: {
        id: true,
        imageUrls: true,
        previousImageUrls: true,
      },
      ...(cursor
        ? {
            cursor: { id: cursor },
            skip: 1,
          }
        : {}),
      orderBy: { id: 'asc' },
      take: BATCH_SIZE,
    })
    if (rows.length === 0) break
    for (const row of rows) {
      scanned += 1
      for (const fieldName of ['imageUrls', 'previousImageUrls'] as const) {
        try {
          // The decoder throws on contract violations; the throw is the signal.
          decodeImageUrlsFromDb(row[fieldName], `${modelName}.${fieldName}`)
        } catch (error) {
          violations += 1
          if (samples.length < 20) {
            samples.push({
              id: row.id,
              field: fieldName,
              message: error instanceof Error ? error.message : String(error),
              value: row[fieldName],
            })
          }
        }
      }
    }
    cursor = rows[rows.length - 1]?.id || null
  }
  const summary = `[check-image-urls-contract] ${modelName}: scanned=${scanned} violations=${violations}`
  _ulogInfo(summary)
  print(summary)
  if (samples.length > 0) {
    _ulogError(`[check-image-urls-contract] ${modelName}: samples=${JSON.stringify(samples, null, 2)}`)
  }
  return { scanned, violations }
}
// Runs the contract check across all configured models; violations fail the
// process via exitCode so the .finally() disconnect still runs.
async function main() {
  let totalScanned = 0
  let totalViolations = 0
  for (const target of MODELS) {
    const result = await checkModel(target.name, target.model)
    totalScanned += result.scanned
    totalViolations += result.violations
  }
  if (totalViolations > 0) {
    _ulogError(`[check-image-urls-contract] failed scanned=${totalScanned} violations=${totalViolations}`)
    print(`[check-image-urls-contract] failed scanned=${totalScanned} violations=${totalViolations}`)
    process.exitCode = 1
    return
  }
  print(`[check-image-urls-contract] ok scanned=${totalScanned}`)
}
// Entry point: always release the Prisma connection, even after a failure.
main()
  .catch((error) => {
    _ulogError('[check-image-urls-contract] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+110
View File
@@ -0,0 +1,110 @@
import fs from 'node:fs'
// A rule pins required log-semantic markers (action names, field names) to a
// specific file; the check fails if any pattern disappears from its file.
type Rule = {
  file: string
  patterns: string[] // literal substrings that must appear in the file
}
const RULES: Rule[] = [
  {
    file: 'src/lib/api-errors.ts',
    patterns: ['x-request-id', 'api.request.start', 'api.request.finish', 'api.request.error'],
  },
  {
    file: 'src/lib/workers/shared.ts',
    patterns: ['worker.start', 'worker.completed', 'worker.failed', 'durationMs', 'errorCode'],
  },
  {
    file: 'src/app/api/sse/route.ts',
    patterns: ['sse.connect', 'sse.replay', 'sse.disconnect'],
  },
  {
    file: 'scripts/watchdog.ts',
    patterns: ['watchdog.started', 'watchdog.tick.ok', 'watchdog.tick.failed'],
  },
  {
    file: 'scripts/bull-board.ts',
    patterns: ['bull_board.started', 'bull_board.shutdown'],
  },
  {
    file: 'src/lib/task/submitter.ts',
    patterns: ['requestId', 'task.submit.created', 'task.submit.enqueued'],
  },
  {
    file: 'src/lib/task/types.ts',
    patterns: ['trace', 'requestId'],
  },
]
/** Reads a file as UTF-8 text. */
function read(file: string) {
  return fs.readFileSync(file, { encoding: 'utf8' })
}
/**
 * Verifies every pattern listed in RULES appears verbatim in its file.
 * Returns human-readable violation strings (empty when all pass).
 */
function checkRules() {
  const violations: string[] = []
  for (const rule of RULES) {
    const content = read(rule.file)
    const missing = rule.patterns.filter((pattern) => !content.includes(pattern))
    for (const pattern of missing) {
      violations.push(`${rule.file} missing "${pattern}"`)
    }
  }
  return violations
}
/**
 * Every API route that calls submitTask( must reference getRequestId and pass
 * `requestId: getRequestId(request)` so task traces correlate with requests.
 */
function checkSubmitTaskRoutes() {
  const root = 'src/app/api'
  const routeFiles = walk(root).filter((file) => file.endsWith('/route.ts'))
  const submitTaskFiles = routeFiles.filter((file) => read(file).includes('submitTask('))
  const violations: string[] = []
  for (const file of submitTaskFiles) {
    const content = read(file)
    if (!content.includes('getRequestId')) {
      violations.push(`${file} uses submitTask but does not import getRequestId`)
    } else if (!content.includes('requestId: getRequestId(request)')) {
      violations.push(`${file} uses submitTask but does not pass requestId`)
    }
  }
  return { submitTaskFiles, violations }
}
/** Recursively lists all file paths under `dir` (depth-first, '/'-joined). */
function walk(dir: string): string[] {
  const collected: string[] = []
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const child = `${dir}/${entry.name}`
    if (entry.isDirectory()) {
      collected.push(...walk(child))
    } else {
      collected.push(child)
    }
  }
  return collected
}
// Entry point: merge static-rule and submitTask-route violations; print all
// of them and exit non-zero when any exist.
function main() {
  const violations = checkRules()
  const submitTaskResult = checkSubmitTaskRoutes()
  violations.push(...submitTaskResult.violations)
  if (violations.length > 0) {
    process.stderr.write('[check:log-semantic] semantic violations detected:\n')
    for (const violation of violations) {
      process.stderr.write(`- ${violation}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(
    `[check:log-semantic] ok rules=${RULES.length} submitTaskRoutes=${submitTaskResult.submitTaskFiles.length}\n`,
  )
}
main()
+110
View File
@@ -0,0 +1,110 @@
import { execSync } from 'node:child_process'
// Directories scanned by both rules below.
const TARGETS = ['src/app/api', 'src/lib']
// Files allowed to call extractStorageKey directly (besides src/lib/storage/).
const EXTRACT_ALLOWLIST = new Set<string>([
  'src/lib/media/service.ts',
  'src/lib/voice/generate-voice-line.ts',
])
// Files allowed to fetch media URLs without wrapping them in toFetchableUrl.
const FETCH_MEDIA_ALLOWLIST = new Set<string>([
  'src/lib/media-process.ts',
  'src/lib/image-cache.ts',
  'src/lib/image-label.ts',
  'src/lib/workers/utils.ts',
  'src/app/api/novel-promotion/[projectId]/download-images/route.ts',
  'src/app/api/novel-promotion/[projectId]/download-videos/route.ts',
  'src/app/api/novel-promotion/[projectId]/download-voices/route.ts',
  'src/app/api/novel-promotion/[projectId]/update-asset-label/route.ts',
  'src/app/api/novel-promotion/[projectId]/voice-generate/route.ts',
  'src/app/api/novel-promotion/[projectId]/video-proxy/route.ts',
])
/**
 * Runs a shell command and returns its stdout. Non-zero exits do not throw:
 * whatever stdout the failed command produced is returned instead (rg exits
 * 1 on "no matches", which is a normal outcome here).
 */
function run(cmd: string): string {
  try {
    return execSync(cmd, { encoding: 'utf8' })
  } catch (error: unknown) {
    if (!error || typeof error !== 'object' || !('stdout' in error)) return ''
    const stdout = (error as { stdout?: unknown }).stdout
    return typeof stdout === 'string' ? stdout : ''
  }
}
/** Splits command output into trimmed, non-empty lines. */
function parseLines(output: string): string[] {
  const lines: string[] = []
  for (const raw of output.split('\n')) {
    const trimmed = raw.trim()
    if (trimmed) lines.push(trimmed)
  }
  return lines
}
/** Extracts the file path (text before the first ':') from an `rg -n` output line. */
function getFile(line: string): string {
  const idx = line.indexOf(':')
  return (idx === -1 ? line : line.slice(0, idx)) || ''
}
/** Extracts the matched code (everything after "file:line:") from an `rg -n` line. */
function getCode(line: string): string {
  const [, , ...rest] = line.split(':')
  return rest.join(':').trim()
}
/**
 * Pulls the first argument expression out of a `fetch(...)` call. Heuristic:
 * the capture stops at the first ')', so nested calls are truncated — good
 * enough for the prefix checks in isSafeFetchArg.
 */
function extractFetchArg(code: string): string {
  const matched = /fetch\(\s*([^)]+)\)/.exec(code)
  if (!matched || !matched[1]) return ''
  return matched[1].trim()
}
/**
 * An argument is considered safe when it is already normalized
 * (toFetchableUrl), a string literal, or an explicit `new URL(...)`.
 */
function isSafeFetchArg(arg: string): boolean {
  if (!arg) return false
  const safePrefixes = [/^toFetchableUrl\(/, /^['"`]/, /^new URL\(/]
  return safePrefixes.some((pattern) => pattern.test(arg))
}
/** Heuristic: the argument mentions a media/signed token near "url", in either order. */
function isMediaLikeFetchArg(arg: string): boolean {
  const mediaThenUrl = /(image|video|audio|signed).*url/i
  const urlThenMedia = /url.*(image|video|audio|signed)/i
  return mediaThenUrl.test(arg) || urlThenMedia.test(arg)
}
/**
 * Lints src/app/api and src/lib for two media-handling rules:
 *  1. extractStorageKey() may only be called from src/lib/storage/ or
 *     explicitly allowlisted files (everyone else must go through
 *     resolveStorageKeyFromMediaValue).
 *  2. Media-looking fetch() arguments must be wrapped in toFetchableUrl()
 *     unless the file is allowlisted for raw media fetches.
 */
function main() {
  const targetExpr = TARGETS.join(' ')
  // Rule 1: direct extractStorageKey calls are forbidden in business code.
  const extractOutput = run(`rg -n "extractStorageKey\\\\(" ${targetExpr}`)
  const extractLines = parseLines(extractOutput)
  const extractViolations = extractLines.filter((line) => {
    const file = getFile(line)
    if (file.startsWith('src/lib/storage/')) return false
    return !EXTRACT_ALLOWLIST.has(file)
  })
  // Rule 2: media-related fetch() calls must wrap the URL in toFetchableUrl.
  const fetchOutput = run(`rg -n "fetch\\\\(" ${targetExpr}`)
  const fetchLines = parseLines(fetchOutput)
  const fetchViolations = fetchLines.filter((line) => {
    const file = getFile(line)
    // Allowlisted files may fetch media URLs directly; every OTHER file is
    // checked. (Bug fix: the previous condition was inverted — it skipped all
    // non-allowlisted files, so raw media fetches outside the allowlist were
    // never flagged, mirroring neither the allowlist name nor rule 1.)
    if (FETCH_MEDIA_ALLOWLIST.has(file)) return false
    const code = getCode(line)
    const arg = extractFetchArg(code)
    if (!isMediaLikeFetchArg(arg)) return false
    return !isSafeFetchArg(arg)
  })
  const violations = [
    ...extractViolations.map((line) => `extractStorageKey forbidden: ${line}`),
    ...fetchViolations.map((line) => `fetch without toFetchableUrl: ${line}`),
  ]
  if (violations.length > 0) {
    process.stderr.write('[check:media-normalization] found violations:\n')
    for (const item of violations) {
      process.stderr.write(`- ${item}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(
    `[check:media-normalization] ok extract_scanned=${extractLines.length} fetch_scanned=${fetchLines.length} allow_extract=${EXTRACT_ALLOWLIST.size} allow_fetch=${FETCH_MEDIA_ALLOWLIST.size}\n`,
  )
}
main()
+462
View File
@@ -0,0 +1,462 @@
// Prisma client; declared here but assigned elsewhere in this script (the
// initialization is outside this excerpt) — presumably a lazy/dynamic import.
// TODO(review): confirm prisma is assigned before first use.
let prisma
// --strict escalates findings to a non-zero exit code.
const STRICT = process.argv.includes('--strict')
// Project columns that store a model reference and need validation.
const MODEL_FIELDS = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
]
// Cap on collected samples to bound report size.
const MAX_SAMPLES = 200
// These tables mirror scripts/check-capability-catalog; note this copy's
// `video` namespace omits generationModeOptions/generateAudioOptions —
// TODO(review): confirm the divergence between the two scripts is intended.
const CAPABILITY_NAMESPACES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
const MODEL_TYPES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
const CAPABILITY_NAMESPACE_ALLOWED_FIELDS = {
  llm: new Set(['reasoningEffortOptions', 'fieldI18n']),
  image: new Set(['resolutionOptions', 'fieldI18n']),
  video: new Set([
    'durationOptions',
    'fpsOptions',
    'resolutionOptions',
    'firstlastframe',
    'supportGenerateAudio',
    'fieldI18n',
  ]),
  audio: new Set(['voiceOptions', 'rateOptions', 'fieldI18n']),
  lipsync: new Set(['modeOptions', 'fieldI18n']),
}
// Maps each translatable field to the option list it annotates.
const CAPABILITY_NAMESPACE_I18N_FIELDS = {
  llm: {
    reasoningEffort: 'reasoningEffortOptions',
  },
  image: {
    resolution: 'resolutionOptions',
  },
  video: {
    duration: 'durationOptions',
    fps: 'fpsOptions',
    resolution: 'resolutionOptions',
  },
  audio: {
    voice: 'voiceOptions',
    rate: 'rateOptions',
  },
  lipsync: {
    mode: 'modeOptions',
  },
}
// Plain-object check: excludes null/undefined, primitives and arrays.
function isRecord(value) {
  return Boolean(value) && typeof value === 'object' && Array.isArray(value) !== true
}
// Accepts only strings with at least one non-whitespace character.
function isNonEmptyString(value) {
  return typeof value === 'string' ? value.trim().length > 0 : false
}
// Array whose every element passes isNonEmptyString.
function isStringArray(value) {
  if (!Array.isArray(value)) return false
  for (const item of value) {
    if (!isNonEmptyString(item)) return false
  }
  return true
}
// An array qualifies only when no element fails the finite-number test.
function isNumberArray(value) {
  if (!Array.isArray(value)) return false
  const invalid = value.some((item) => typeof item !== 'number' || !Number.isFinite(item))
  return !invalid
}
// Parses "provider::modelId" (split at the FIRST "::"); null when the marker
// is absent or either side trims to empty.
function parseModelKeyStrict(value) {
  if (!isNonEmptyString(value)) return null
  const normalized = value.trim()
  const markerAt = normalized.indexOf('::')
  if (markerAt < 0) return null
  const provider = normalized.slice(0, markerAt).trim()
  const modelId = normalized.slice(markerAt + 2).trim()
  if (provider === '' || modelId === '') return null
  return {
    provider,
    modelId,
    modelKey: `${provider}::${modelId}`,
  }
}
// Appends a violation sample unless the shared MAX_SAMPLES cap is reached.
function addSample(summary, sample) {
  if (summary.samples.length < MAX_SAMPLES) {
    summary.samples.push(sample)
  }
}
// Records a validation issue as a { field, message } pair.
function pushIssue(issues, field, message) {
  const issue = { field, message }
  issues.push(issue)
}
// An i18n key is a non-empty string with at least one dot separator.
function isI18nKey(value) {
  if (!isNonEmptyString(value)) return false
  return value.includes('.')
}
// Flags any field of a capabilities namespace that is not in the allowlist.
// A legacy `i18n` field gets a dedicated migration hint instead of the
// generic unknown-field message.
function validateAllowedFields(issues, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  const allowed = CAPABILITY_NAMESPACE_ALLOWED_FIELDS[namespace]
  for (const field of Object.keys(namespaceValue)) {
    if (allowed.has(field)) continue
    const message = field === 'i18n'
      ? 'use fieldI18n instead of i18n'
      : `unknown capability field: ${field}`
    pushIssue(issues, `capabilities.${namespace}.${field}`, message)
  }
}
// Validates the optional `fieldI18n` block of a capabilities namespace.
// For each documented field it checks that:
//  - the field name is listed in CAPABILITY_NAMESPACE_I18N_FIELDS[namespace],
//  - labelKey / unitKey, when present, look like dotted i18n keys,
//  - optionLabelKeys maps only option values that actually appear in the
//    namespace's corresponding options array, each to an i18n key.
// All problems are appended to `issues`; nothing is thrown.
function validateFieldI18nMap(issues, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  // fieldI18n is optional; absence is not an issue.
  if (namespaceValue.fieldI18n === undefined) return
  if (!isRecord(namespaceValue.fieldI18n)) {
    pushIssue(issues, `capabilities.${namespace}.fieldI18n`, 'fieldI18n must be an object')
    return
  }
  const allowedI18nFields = CAPABILITY_NAMESPACE_I18N_FIELDS[namespace]
  for (const [fieldName, fieldConfig] of Object.entries(namespaceValue.fieldI18n)) {
    if (!(fieldName in allowedI18nFields)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}`,
        `unknown i18n field: ${fieldName}`,
      )
      continue
    }
    if (!isRecord(fieldConfig)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}`,
        'field i18n config must be an object',
      )
      continue
    }
    // labelKey / unitKey are optional, but must be dotted i18n keys if given.
    if (fieldConfig.labelKey !== undefined && !isI18nKey(fieldConfig.labelKey)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}.labelKey`,
        'labelKey must be an i18n key',
      )
    }
    if (fieldConfig.unitKey !== undefined && !isI18nKey(fieldConfig.unitKey)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}.unitKey`,
        'unitKey must be an i18n key',
      )
    }
    if (fieldConfig.optionLabelKeys !== undefined) {
      if (!isRecord(fieldConfig.optionLabelKeys)) {
        pushIssue(
          issues,
          `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys`,
          'optionLabelKeys must be an object',
        )
        continue
      }
      // Build the set of legal option values (stringified) from the
      // namespace's matching options array, when that array exists.
      const optionFieldName = allowedI18nFields[fieldName]
      const allowedOptionsRaw = namespaceValue[optionFieldName]
      const allowedOptions = Array.isArray(allowedOptionsRaw)
        ? new Set(allowedOptionsRaw.map((value) => String(value)))
        : null
      for (const [optionValue, optionLabelKey] of Object.entries(fieldConfig.optionLabelKeys)) {
        if (!isI18nKey(optionLabelKey)) {
          pushIssue(
            issues,
            `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys.${optionValue}`,
            'option label must be an i18n key',
          )
        }
        // Only cross-check option values when the options array is present.
        if (allowedOptions && !allowedOptions.has(optionValue)) {
          pushIssue(
            issues,
            `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys.${optionValue}`,
            `option ${optionValue} is not defined in ${optionFieldName}`,
          )
        }
      }
    }
  }
}
// Validates a custom model's `capabilities` object against its declared type.
// Returns the list of issues found (empty when valid).
// Checks, in order:
//  1. modelType is one of MODEL_TYPES;
//  2. every top-level namespace is known and matches the model type;
//  3. each present namespace's fields are allowlisted, correctly typed, and
//     carry a well-formed fieldI18n block.
// Issue ordering is preserved from the original implementation: namespace-key
// issues first (object key order), then content issues for llm, image, video,
// audio, lipsync in that fixed order. The five near-identical per-namespace
// sections are collapsed into a data-driven table of field validators.
function validateCapabilities(modelType, capabilities) {
  const issues = []
  if (!MODEL_TYPES.has(modelType)) {
    pushIssue(issues, 'type', 'type must be llm/image/video/audio/lipsync')
    return issues
  }
  // Missing capabilities are legal; a non-object value is not.
  if (capabilities === undefined || capabilities === null) return issues
  if (!isRecord(capabilities)) {
    pushIssue(issues, 'capabilities', 'capabilities must be an object')
    return issues
  }
  for (const namespace of Object.keys(capabilities)) {
    if (!CAPABILITY_NAMESPACES.has(namespace)) {
      pushIssue(issues, `capabilities.${namespace}`, `unknown capabilities namespace: ${namespace}`)
      continue
    }
    if (namespace !== modelType) {
      pushIssue(issues, `capabilities.${namespace}`, `namespace ${namespace} is not allowed for model type ${modelType}`)
    }
  }
  // Per-namespace field validators: [fieldName, typeCheck, errorMessage].
  // Mirrors CAPABILITY_NAMESPACE_ALLOWED_FIELDS minus fieldI18n, which is
  // handled separately by validateFieldI18nMap.
  const isBoolean = (value) => typeof value === 'boolean'
  const fieldValidators = {
    llm: [['reasoningEffortOptions', isStringArray, 'must be string array']],
    image: [['resolutionOptions', isStringArray, 'must be string array']],
    video: [
      ['durationOptions', isNumberArray, 'must be number array'],
      ['fpsOptions', isNumberArray, 'must be number array'],
      ['resolutionOptions', isStringArray, 'must be string array'],
      ['supportGenerateAudio', isBoolean, 'must be boolean'],
      ['firstlastframe', isBoolean, 'must be boolean'],
    ],
    audio: [
      ['voiceOptions', isStringArray, 'must be string array'],
      ['rateOptions', isStringArray, 'must be string array'],
    ],
    lipsync: [['modeOptions', isStringArray, 'must be string array']],
  }
  // Even namespaces that don't match modelType are content-validated, as in
  // the original implementation (the mismatch issue was already recorded).
  for (const namespace of ['llm', 'image', 'video', 'audio', 'lipsync']) {
    const namespaceValue = capabilities[namespace]
    if (namespaceValue === undefined) continue
    if (!isRecord(namespaceValue)) {
      pushIssue(issues, `capabilities.${namespace}`, `${namespace} capabilities must be an object`)
      continue
    }
    validateAllowedFields(issues, namespace, namespaceValue)
    for (const [field, check, message] of fieldValidators[namespace]) {
      if (namespaceValue[field] !== undefined && !check(namespaceValue[field])) {
        pushIssue(issues, `capabilities.${namespace}.${field}`, message)
      }
    }
    validateFieldI18nMap(issues, namespace, namespaceValue)
  }
  return issues
}
// Entry point: audits model-key columns and custom model JSON stored in
// userPreference and novelPromotionProject rows, prints a JSON summary to
// stdout, and (with --strict) sets exit code 1 when any violation is found.
async function main() {
  // Import @prisma/client lazily so the script can fail with a clear message
  // instead of a module-resolution error when dependencies are missing.
  let PrismaClient
  try {
    ({ PrismaClient } = await import('@prisma/client'))
  } catch {
    throw new Error('MISSING_DEPENDENCY: @prisma/client is not installed, run npm install first')
  }
  prisma = new PrismaClient()
  // Counters per table plus a bounded list of example violations.
  const summary = {
    generatedAt: new Date().toISOString(),
    userPreference: {
      total: 0,
      invalidModelKeyFields: 0,
      invalidCustomModelsJson: 0,
      invalidCustomModelShape: 0,
      invalidCapabilities: 0,
    },
    novelPromotionProject: {
      total: 0,
      invalidModelKeyFields: 0,
    },
    samples: [],
  }
  const userPrefs = await prisma.userPreference.findMany({
    select: {
      id: true,
      customModels: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  })
  for (const pref of userPrefs) {
    summary.userPreference.total += 1
    // Each model column must be empty or a strict provider::modelId key.
    for (const field of MODEL_FIELDS) {
      const rawValue = pref[field]
      if (!rawValue) continue
      if (!parseModelKeyStrict(rawValue)) {
        summary.userPreference.invalidModelKeyFields += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field,
          reason: 'model field is not provider::modelId',
        })
      }
    }
    if (!pref.customModels) continue
    // customModels is stored as a JSON string; it must parse to an array.
    let parsedCustomModels
    try {
      parsedCustomModels = JSON.parse(pref.customModels)
    } catch {
      summary.userPreference.invalidCustomModelsJson += 1
      addSample(summary, {
        table: 'userPreference',
        rowId: pref.id,
        field: 'customModels',
        reason: 'invalid JSON',
      })
      continue
    }
    if (!Array.isArray(parsedCustomModels)) {
      summary.userPreference.invalidCustomModelsJson += 1
      addSample(summary, {
        table: 'userPreference',
        rowId: pref.id,
        field: 'customModels',
        reason: 'customModels is not array',
      })
      continue
    }
    for (let index = 0; index < parsedCustomModels.length; index += 1) {
      const modelRaw = parsedCustomModels[index]
      if (!isRecord(modelRaw)) {
        summary.userPreference.invalidCustomModelShape += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field: `customModels[${index}]`,
          reason: 'model item is not object',
        })
        continue
      }
      // modelKey must parse strictly AND agree with the separate
      // provider/modelId fields on the same item.
      const modelKey = isNonEmptyString(modelRaw.modelKey) ? modelRaw.modelKey.trim() : ''
      const provider = isNonEmptyString(modelRaw.provider) ? modelRaw.provider.trim() : ''
      const modelId = isNonEmptyString(modelRaw.modelId) ? modelRaw.modelId.trim() : ''
      const parsed = parseModelKeyStrict(modelKey)
      if (!parsed || parsed.provider !== provider || parsed.modelId !== modelId) {
        summary.userPreference.invalidCustomModelShape += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field: `customModels[${index}].modelKey`,
          reason: 'modelKey/provider/modelId mismatch',
        })
      }
      // Only the first capability issue per item is sampled.
      const modelType = isNonEmptyString(modelRaw.type) ? modelRaw.type.trim() : ''
      const capabilityIssues = validateCapabilities(modelType, modelRaw.capabilities)
      if (capabilityIssues.length > 0) {
        summary.userPreference.invalidCapabilities += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field: capabilityIssues[0].field,
          reason: capabilityIssues[0].message,
        })
      }
    }
  }
  // novelPromotionProject rows only carry the model-key columns.
  const projects = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  })
  for (const project of projects) {
    summary.novelPromotionProject.total += 1
    for (const field of MODEL_FIELDS) {
      const rawValue = project[field]
      if (!rawValue) continue
      if (!parseModelKeyStrict(rawValue)) {
        summary.novelPromotionProject.invalidModelKeyFields += 1
        addSample(summary, {
          table: 'novelPromotionProject',
          rowId: project.id,
          field,
          reason: 'model field is not provider::modelId',
        })
      }
    }
  }
  process.stdout.write(`${JSON.stringify(summary, null, 2)}\n`)
  if (!STRICT) return
  const hasViolations = summary.userPreference.invalidModelKeyFields > 0
    || summary.userPreference.invalidCustomModelsJson > 0
    || summary.userPreference.invalidCustomModelShape > 0
    || summary.userPreference.invalidCapabilities > 0
    || summary.novelPromotionProject.invalidModelKeyFields > 0
  if (hasViolations) {
    process.exitCode = 1
  }
}
// Report failures on stderr and always disconnect the client (prisma stays
// undefined when the dependency import failed before assignment).
main()
  .catch((error) => {
    process.stderr.write(`[check-model-config-contract] failed: ${String(error)}\n`)
    process.exitCode = 1
  })
  .finally(async () => {
    if (prisma) {
      await prisma.$disconnect()
    }
  })
+52
View File
@@ -0,0 +1,52 @@
import { execSync } from 'node:child_process'
// Files that may legitimately call console.* directly: the logging core
// itself plus the guard/check scripts (which report via console-style output).
const ALLOWLIST = new Set<string>([
  'src/lib/logging/core.ts',
  'src/lib/logging/config.ts',
  'src/lib/logging/context.ts',
  'src/lib/logging/redact.ts',
  'scripts/check-no-console.ts',
  'scripts/guards/no-api-direct-llm-call.mjs',
  'scripts/guards/no-internal-task-sync-fallback.mjs',
  'scripts/guards/no-media-provider-bypass.mjs',
  'scripts/guards/no-server-mirror-state.mjs',
  'scripts/guards/task-loading-guard.mjs',
  'scripts/guards/task-target-states-no-polling-guard.mjs',
])
// Runs a shell command and returns its stdout as text. When the command exits
// non-zero, returns whatever stdout the failed process produced (empty string
// if none) — rg exits non-zero when it finds no matches, which is fine here.
function run(cmd: string): string {
  try {
    return execSync(cmd, { encoding: 'utf8' })
  } catch (error: unknown) {
    const stdout =
      error && typeof error === 'object' && 'stdout' in error
        ? (error as { stdout?: unknown }).stdout
        : undefined
    return typeof stdout === 'string' ? stdout : ''
  }
}
// Scans src/ and scripts/ for direct console.* calls with ripgrep and fails
// (exit 1) for any hit whose file is not in ALLOWLIST.
function main() {
  const output = run(`rg -n "console\\\\.(log|info|warn|error|debug)\\\\(" src scripts`)
  const lines = output
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean)
  const violations: string[] = []
  for (const line of lines) {
    // rg -n output is "<file>:<lineNo>:<text>"; the first segment is the path.
    const file = line.split(':', 1)[0]
    if (!ALLOWLIST.has(file)) violations.push(line)
  }
  if (violations.length > 0) {
    process.stderr.write('[check:logs] found forbidden console usage:\n')
    for (const line of violations) {
      process.stderr.write(`- ${line}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(`[check:logs] ok scanned=${lines.length} allowlist=${ALLOWLIST.size}\n`)
}
main()
@@ -0,0 +1,323 @@
import { prisma } from '@/lib/prisma'
import { TASK_TYPE } from '@/lib/task/types'
// Loose alias for JSON-ish values read from task payload/result/event columns.
type AnyJson = unknown
// A string leaf found while walking a JSON tree: its JSONPath-ish location and text.
type Match = {
  path: string
  value: string
}
// Parsed CLI options for this sampling script.
type Options = {
  minutes: number
  limit: number
  projectId: string | null
  strictNoData: boolean
  includeEvents: boolean
  maxEventsPerTask: number
  json: boolean
}
// Coarse failure buckets reported by classifyFailure.
type FailureType = 'normalize' | 'model' | 'cancelled' | 'other'
// Error codes attributed to the model/provider side rather than our pipeline.
const MODEL_ERROR_CODES = new Set([
  'GENERATION_FAILED',
  'GENERATION_TIMEOUT',
  'RATE_LIMIT',
  'EXTERNAL_ERROR',
  'SENSITIVE_CONTENT',
])
// Reads an integer CLI flag of the form --<name>=<value>.
// Returns fallback when the flag is absent or not a positive integer.
function parseNumberArg(name: string, fallback: number): number {
  const flag = `--${name}=`
  const hit = process.argv.find((item) => item.startsWith(flag))
  if (hit === undefined) return fallback
  const parsed = Number.parseInt(hit.split('=')[1] || '', 10)
  if (!Number.isFinite(parsed)) return fallback
  return parsed > 0 ? parsed : fallback
}
// Reads a string CLI flag of the form --<name>=<value>.
// Returns null when the flag is absent or its value is blank after trimming.
function parseStringArg(name: string): string | null {
  const flag = `--${name}=`
  const hit = process.argv.find((item) => item.startsWith(flag))
  if (hit === undefined) return null
  const trimmed = (hit.split('=')[1] || '').trim()
  return trimmed.length > 0 ? trimmed : null
}
// Reads a boolean CLI flag of the form --<name>=<value>.
// Accepts 1/true/yes/on (case-insensitive) as true; returns fallback when absent.
function parseBooleanArg(name: string, fallback = false): boolean {
  const flag = `--${name}=`
  const hit = process.argv.find((item) => item.startsWith(flag))
  if (hit === undefined) return fallback
  const normalized = (hit.split('=')[1] || '').trim().toLowerCase()
  return ['1', 'true', 'yes', 'on'].includes(normalized)
}
// Collects every CLI flag into a single Options object.
// Defaults: 24h window, 200 tasks, no project scope, events off, 40 events/task.
function parseOptions(): Options {
  const minutes = parseNumberArg('minutes', 60 * 24)
  const limit = parseNumberArg('limit', 200)
  const projectId = parseStringArg('projectId')
  return {
    minutes,
    limit,
    projectId,
    strictNoData: parseBooleanArg('strictNoData', false),
    includeEvents: parseBooleanArg('includeEvents', false),
    maxEventsPerTask: parseNumberArg('maxEventsPerTask', 40),
    json: parseBooleanArg('json', false),
  }
}
// Truncates long strings to `max` characters, appending "..." as a marker.
function toExcerpt(value: string, max = 180): string {
  return value.length > max ? `${value.slice(0, max)}...` : value
}
// Depth-first walk over arbitrary JSON, collecting every string leaf that
// satisfies `predicate` together with its JSONPath-like location (e.g.
// "$.a[1].b"). Accumulates into and returns the shared `matches` array.
function findStringMatches(
  value: AnyJson,
  predicate: (input: string) => boolean,
  path = '$',
  matches: Match[] = [],
): Match[] {
  if (typeof value === 'string') {
    if (predicate(value)) matches.push({ path, value })
  } else if (Array.isArray(value)) {
    for (let index = 0; index < value.length; index += 1) {
      findStringMatches(value[index], predicate, `${path}[${index}]`, matches)
    }
  } else if (value && typeof value === 'object') {
    const record = value as Record<string, unknown>
    for (const key of Object.keys(record)) {
      findStringMatches(record[key], predicate, `${path}.${key}`, matches)
    }
  }
  return matches
}
// Buckets a failed task into a coarse failure category for reporting.
// Priority: an explicit error code decides first (cancelled / model /
// normalize / other); only when the code is empty do we fall back to
// scanning free text (errorMessage, result JSON, event payloads) against
// the normalize/model regexes, in that order.
// Fix: the original pushed `code` into the text-scan list inside the
// empty-code branch — dead code, since any truthy code already returned above.
function classifyFailure(task: {
  errorCode: string | null
  errorMessage: string | null
  result: AnyJson | null
  events: Array<{ payload: AnyJson | null }>
}): FailureType {
  const code = (task.errorCode || '').trim().toUpperCase()
  const normalizeRe = /normalize|video_frame_normalize|normalizeReferenceImagesForGeneration|reference image normalize failed|outbound image input is empty|relative_path_rejected/i
  const modelRe = /generation failed|provider|upstream|rate limit|timed out|timeout|sensitive/i
  if (code === 'TASK_CANCELLED') return 'cancelled'
  if (MODEL_ERROR_CODES.has(code)) return 'model'
  if (code) {
    // Codes emitted by the normalize pipeline itself.
    if (code === 'INVALID_PARAMS' || code === 'OUTBOUND_IMAGE_FETCH_FAILED') return 'normalize'
    return 'other'
  }
  // No error code: gather every reachable string and pattern-match.
  const values: string[] = []
  if (task.errorMessage) values.push(task.errorMessage)
  if (task.result) {
    for (const hit of findStringMatches(task.result, () => true)) {
      values.push(hit.value)
    }
  }
  for (const event of task.events) {
    if (!event.payload) continue
    for (const hit of findStringMatches(event.payload, () => true)) {
      values.push(hit.value)
    }
  }
  if (values.some((item) => normalizeRe.test(item))) return 'normalize'
  if (values.some((item) => modelRe.test(item))) return 'model'
  return 'other'
}
// Entry point: samples recent image/video tasks and reports
//  1) any "/_next/image" URLs leaking into task payload/result/events, and
//  2) failure counts bucketed by classifyFailure and by raw error code.
// Exits 1 on contamination hits, 2 when --strictNoData is set and nothing
// was sampled; otherwise exits 0 after printing summary lines.
async function main() {
  const options = parseOptions()
  const since = new Date(Date.now() - options.minutes * 60_000)
  const monitoredTypes = [
    TASK_TYPE.MODIFY_ASSET_IMAGE,
    TASK_TYPE.ASSET_HUB_MODIFY,
    TASK_TYPE.VIDEO_PANEL,
  ]
  // Newest tasks first, capped by --limit.
  const tasks = await prisma.task.findMany({
    where: {
      type: { in: monitoredTypes },
      createdAt: { gte: since },
      ...(options.projectId ? { projectId: options.projectId } : {}),
    },
    select: {
      id: true,
      type: true,
      status: true,
      projectId: true,
      targetType: true,
      targetId: true,
      createdAt: true,
      errorCode: true,
      errorMessage: true,
      payload: true,
      result: true,
    },
    orderBy: { createdAt: 'desc' },
    take: options.limit,
  })
  if (tasks.length === 0) {
    process.stdout.write(
      `[check:outbound-image-runtime-sample] no data window=${options.minutes}m limit=${options.limit} strictNoData=${options.strictNoData}\n`,
    )
    if (options.strictNoData) process.exit(2)
    return
  }
  // Optionally pull the latest events per task (queried newest-first by id,
  // then reversed back into chronological order before storing).
  // NOTE(review): one query per sampled task (N+1); acceptable for a bounded
  // sample but worth batching if --limit grows.
  const eventsByTaskId = new Map<string, Array<{ eventType: string; payload: AnyJson | null; createdAt: Date }>>()
  let eventCount = 0
  if (options.includeEvents) {
    for (const task of tasks) {
      const rows = await prisma.taskEvent.findMany({
        where: { taskId: task.id },
        select: {
          taskId: true,
          eventType: true,
          payload: true,
          createdAt: true,
        },
        orderBy: { id: 'desc' },
        take: options.maxEventsPerTask,
      })
      const ordered = [...rows].reverse()
      eventCount += ordered.length
      if (ordered.length > 0) {
        eventsByTaskId.set(
          task.id,
          ordered.map((event) => ({
            eventType: event.eventType,
            payload: event.payload,
            createdAt: event.createdAt,
          })),
        )
      }
    }
  }
  // A "hit" is any string anywhere in payload/result/events containing the
  // Next.js image-optimizer route, which must never reach outbound calls.
  const nextImagePredicate = (input: string) => input.includes('/_next/image')
  const hits: Array<{
    taskId: string
    taskType: string
    source: 'task.payload' | 'task.result' | 'task.event'
    path: string
    value: string
  }> = []
  let failedCount = 0
  const failedByClass: Record<FailureType, number> = {
    normalize: 0,
    model: 0,
    cancelled: 0,
    other: 0,
  }
  const failedByCode: Record<string, number> = {}
  for (const task of tasks) {
    const taskEventsForTask = eventsByTaskId.get(task.id) || []
    if (task.payload) {
      for (const match of findStringMatches(task.payload, nextImagePredicate)) {
        hits.push({
          taskId: task.id,
          taskType: task.type,
          source: 'task.payload',
          path: match.path,
          value: match.value,
        })
      }
    }
    if (task.result) {
      for (const match of findStringMatches(task.result, nextImagePredicate)) {
        hits.push({
          taskId: task.id,
          taskType: task.type,
          source: 'task.result',
          path: match.path,
          value: match.value,
        })
      }
    }
    for (const event of taskEventsForTask) {
      if (!event.payload) continue
      for (const match of findStringMatches(event.payload, nextImagePredicate)) {
        hits.push({
          taskId: task.id,
          taskType: task.type,
          source: 'task.event',
          path: match.path,
          value: match.value,
        })
      }
    }
    // Failure accounting: by raw code and by coarse classification.
    if (task.status === 'failed') {
      failedCount += 1
      const code = (task.errorCode || 'UNKNOWN').trim() || 'UNKNOWN'
      failedByCode[code] = (failedByCode[code] || 0) + 1
      const failureType = classifyFailure({
        errorCode: task.errorCode,
        errorMessage: task.errorMessage,
        result: task.result,
        events: taskEventsForTask,
      })
      failedByClass[failureType] += 1
    }
  }
  const typeCount = tasks.reduce<Record<string, number>>((acc, item) => {
    acc[item.type] = (acc[item.type] || 0) + 1
    return acc
  }, {})
  process.stdout.write(
    `[check:outbound-image-runtime-sample] window=${options.minutes}m sampled=${tasks.length} events=${eventCount} includeEvents=${options.includeEvents} next_image_hits=${hits.length}\n`,
  )
  process.stdout.write(`[check:outbound-image-runtime-sample] task_types=${JSON.stringify(typeCount)}\n`)
  process.stdout.write(
    `[check:outbound-image-runtime-sample] failures total=${failedCount} normalize=${failedByClass.normalize} model=${failedByClass.model} cancelled=${failedByClass.cancelled} other=${failedByClass.other} by_code=${JSON.stringify(failedByCode)}\n`,
  )
  // --json emits the same summary machine-readably on one line.
  if (options.json) {
    process.stdout.write(
      `${JSON.stringify({
        windowMinutes: options.minutes,
        sampled: tasks.length,
        events: eventCount,
        includeEvents: options.includeEvents,
        nextImageHits: hits.length,
        taskTypes: typeCount,
        failures: {
          total: failedCount,
          byClass: failedByClass,
          byCode: failedByCode,
        },
      })}\n`,
    )
  }
  // Contamination is a hard failure; only the first 20 hits are printed.
  if (hits.length > 0) {
    process.stderr.write('[check:outbound-image-runtime-sample] found /_next/image contamination:\n')
    for (const hit of hits.slice(0, 20)) {
      process.stderr.write(
        `- task=${hit.taskId} type=${hit.taskType} source=${hit.source} path=${hit.path} value=${toExcerpt(hit.value)}\n`,
      )
    }
    process.exit(1)
  }
}
// Report failures on stderr and always disconnect the shared prisma client.
main()
  .catch((error) => {
    const message = error instanceof Error ? error.message : String(error)
    process.stderr.write(`[check:outbound-image-runtime-sample] failed: ${message}\n`)
    process.exit(1)
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
@@ -0,0 +1,224 @@
import { prisma } from '@/lib/prisma'
import { TASK_STATUS, TASK_TYPE } from '@/lib/task/types'
// Task status -> count map for one query window.
type StatusCount = Record<string, number>
// Aggregated stats for a time window; successRate is null when no task
// finished (completed + failed === 0).
type WindowSummary = {
  total: number
  finishedTotal: number
  completed: number
  failed: number
  successRate: number | null
  byStatus: StatusCount
  byType: Record<string, number>
}
// Parsed CLI options for the current-vs-baseline comparison.
type Options = {
  minutes: number
  baselineMinutes: number
  baselineOffsetMinutes: number
  projectId: string | null
  tolerancePct: number
  minFinishedSamples: number
  strict: boolean
  json: boolean
}
// Default window: 7 days.
const DEFAULT_MINUTES = 60 * 24 * 7
// Allowed success-rate regression (percentage points) before failing.
const DEFAULT_TOLERANCE_PCT = 2
// Minimum finished tasks per window for the comparison to be meaningful.
const DEFAULT_MIN_FINISHED_SAMPLES = 20
// Reads a numeric CLI flag of the form --<name>=<value> (floats allowed).
// Returns fallback when the flag is absent or not a positive finite number.
function parseNumberArg(name: string, fallback: number): number {
  const flag = `--${name}=`
  const hit = process.argv.find((item) => item.startsWith(flag))
  if (hit === undefined) return fallback
  const parsed = Number.parseFloat(hit.split('=')[1] || '')
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback
}
// Reads a boolean CLI flag of the form --<name>=<value>.
// Accepts 1/true/yes/on (case-insensitive) as true; returns fallback when absent.
function parseBooleanArg(name: string, fallback = false): boolean {
  const flag = `--${name}=`
  const hit = process.argv.find((item) => item.startsWith(flag))
  if (hit === undefined) return fallback
  const normalized = (hit.split('=')[1] || '').trim().toLowerCase()
  return ['1', 'true', 'yes', 'on'].includes(normalized)
}
// Reads a string CLI flag of the form --<name>=<value>.
// Returns null when the flag is absent or blank after trimming.
function parseStringArg(name: string): string | null {
  const flag = `--${name}=`
  const hit = process.argv.find((item) => item.startsWith(flag))
  if (hit === undefined) return null
  const trimmed = (hit.split('=')[1] || '').trim()
  return trimmed.length > 0 ? trimmed : null
}
// Collects CLI flags into Options. Both the baseline window length and its
// offset default to the current window length, i.e. the baseline is the
// window immediately preceding the current one.
function parseOptions(): Options {
  const minutes = parseNumberArg('minutes', DEFAULT_MINUTES)
  const baselineMinutes = parseNumberArg('baselineMinutes', minutes)
  const baselineOffsetMinutes = parseNumberArg('baselineOffsetMinutes', minutes)
  const projectId = parseStringArg('projectId')
  return {
    minutes,
    baselineMinutes,
    baselineOffsetMinutes,
    projectId,
    tolerancePct: parseNumberArg('tolerancePct', DEFAULT_TOLERANCE_PCT),
    minFinishedSamples: parseNumberArg('minFinishedSamples', DEFAULT_MIN_FINISHED_SAMPLES),
    strict: parseBooleanArg('strict', false),
    json: parseBooleanArg('json', false),
  }
}
// Formats a percentage with two decimals; null (no data) renders as "N/A".
function asPct(value: number | null): string {
  if (value === null) return 'N/A'
  return `${value.toFixed(2)}%`
}
// Success rate in percent over finished tasks; null when nothing finished.
function getSuccessRate(completed: number, failed: number): number | null {
  const finished = completed + failed
  return finished > 0 ? (completed / finished) * 100 : null
}
// Reduces raw task rows into per-status / per-type counts plus the
// completed-vs-failed success rate for one time window.
function summarizeRows(
  rows: Array<{ status: string; type: string }>,
): WindowSummary {
  const byStatus: StatusCount = {}
  const byType: Record<string, number> = {}
  rows.forEach((row) => {
    byStatus[row.status] = (byStatus[row.status] ?? 0) + 1
    byType[row.type] = (byType[row.type] ?? 0) + 1
  })
  const completed = byStatus[TASK_STATUS.COMPLETED] ?? 0
  const failed = byStatus[TASK_STATUS.FAILED] ?? 0
  return {
    total: rows.length,
    finishedTotal: completed + failed,
    completed,
    failed,
    successRate: getSuccessRate(completed, failed),
    byStatus,
    byType,
  }
}
// Loads monitored task rows created in [from, to), optionally scoped to one
// project, and reduces them to a WindowSummary via summarizeRows.
async function fetchWindowSummary(params: {
  from: Date
  to: Date
  projectId: string | null
}) {
  // Same task types as the runtime-sample check, kept in sync manually.
  const monitoredTypes = [
    TASK_TYPE.MODIFY_ASSET_IMAGE,
    TASK_TYPE.ASSET_HUB_MODIFY,
    TASK_TYPE.VIDEO_PANEL,
  ]
  const rows = await prisma.task.findMany({
    where: {
      type: { in: monitoredTypes },
      createdAt: {
        gte: params.from,
        lt: params.to,
      },
      ...(params.projectId ? { projectId: params.projectId } : {}),
    },
    // Only status/type are needed for the aggregation.
    select: {
      status: true,
      type: true,
    },
  })
  return summarizeRows(rows)
}
// Entry point: compares the success rate of a current window against a
// baseline window ending --baselineOffsetMinutes before now.
// Outcomes: 'pass' (delta within tolerance), 'fail' (regressed beyond
// tolerance), 'blocked' (not enough finished samples in either window).
// With --strict: fail exits 1, blocked exits 2.
async function main() {
  const options = parseOptions()
  const now = Date.now()
  const currentEnd = new Date(now)
  const currentStart = new Date(now - options.minutes * 60_000)
  const baselineEnd = new Date(now - options.baselineOffsetMinutes * 60_000)
  const baselineStart = new Date(baselineEnd.getTime() - options.baselineMinutes * 60_000)
  // Both windows are independent queries; fetch them in parallel.
  const [current, baseline] = await Promise.all([
    fetchWindowSummary({
      from: currentStart,
      to: currentEnd,
      projectId: options.projectId,
    }),
    fetchWindowSummary({
      from: baselineStart,
      to: baselineEnd,
      projectId: options.projectId,
    }),
  ])
  const hasEnoughCurrent = current.finishedTotal >= options.minFinishedSamples
  const hasEnoughBaseline = baseline.finishedTotal >= options.minFinishedSamples
  const hasEnoughSamples = hasEnoughCurrent && hasEnoughBaseline
  // Delta in percentage points; null when either window had no finished tasks.
  const rateDeltaPct =
    current.successRate !== null && baseline.successRate !== null
      ? current.successRate - baseline.successRate
      : null
  // Only a regression beyond tolerance fails; improvements always pass.
  const meetsTolerance =
    rateDeltaPct !== null
      ? rateDeltaPct >= -Math.abs(options.tolerancePct)
      : false
  const status = hasEnoughSamples
    ? meetsTolerance
      ? 'pass'
      : 'fail'
    : 'blocked'
  process.stdout.write(
    `[check:outbound-image-success-rate] current=${asPct(current.successRate)} baseline=${asPct(baseline.successRate)} delta=${asPct(rateDeltaPct)} tolerance=-${Math.abs(options.tolerancePct).toFixed(2)}% status=${status}\n`,
  )
  process.stdout.write(
    `[check:outbound-image-success-rate] current_finished=${current.finishedTotal} baseline_finished=${baseline.finishedTotal} min_required=${options.minFinishedSamples}\n`,
  )
  process.stdout.write(
    `[check:outbound-image-success-rate] current_by_type=${JSON.stringify(current.byType)} baseline_by_type=${JSON.stringify(baseline.byType)}\n`,
  )
  // --json emits the same result machine-readably on one line.
  if (options.json) {
    process.stdout.write(
      `${JSON.stringify({
        status,
        tolerancePct: options.tolerancePct,
        minFinishedSamples: options.minFinishedSamples,
        windows: {
          current: {
            from: currentStart.toISOString(),
            to: currentEnd.toISOString(),
            ...current,
          },
          baseline: {
            from: baselineStart.toISOString(),
            to: baselineEnd.toISOString(),
            ...baseline,
          },
        },
        rateDeltaPct,
        hasEnoughSamples,
      })}\n`,
    )
  }
  if (!options.strict) return
  if (status === 'pass') return
  if (status === 'blocked') process.exit(2)
  process.exit(1)
}
// Report failures on stderr and always disconnect the shared prisma client.
main()
  .catch((error) => {
    const message = error instanceof Error ? error.message : String(error)
    process.stderr.write(`[check:outbound-image-success-rate] failed: ${message}\n`)
    process.exit(1)
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+167
View File
@@ -0,0 +1,167 @@
import fs from 'node:fs'
import path from 'node:path'
// A single guard rule: a repo-relative file, the regex to (not) find in it,
// and the message reported on violation.
type Rule = {
  file: string
  pattern: RegExp
  message: string
}
// Reads a repo-relative file as UTF-8 text; throws if the file is missing.
function readFile(relativePath: string): string {
  const absolutePath = path.resolve(process.cwd(), relativePath)
  return fs.readFileSync(absolutePath, 'utf8')
}
// Patterns that MUST appear in the listed files: exported normalize helpers,
// structured error types/codes, sanitization at the API routes, and the
// preview/play UI wiring. Each message explains the invariant being guarded.
const mustIncludeRules: Rule[] = [
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+normalizeToOriginalMediaUrl\s*\(/,
    message: 'missing normalizeToOriginalMediaUrl export',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+normalizeToBase64ForGeneration\s*\(/,
    message: 'missing normalizeToBase64ForGeneration export',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+normalizeReferenceImagesForGeneration\s*\(/,
    message: 'missing normalizeReferenceImagesForGeneration export',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /class\s+OutboundImageNormalizeError\s+extends\s+Error/,
    message: 'outbound-image.ts must expose structured normalize error type',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /OUTBOUND_IMAGE_FETCH_FAILED/,
    message: 'outbound-image.ts must classify fetch failures with structured error codes',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /OUTBOUND_IMAGE_REFERENCE_ALL_FAILED/,
    message: 'outbound-image.ts must fail explicitly when all references fail to normalize',
  },
  {
    file: 'src/lib/workers/handlers/image-task-handlers-core.ts',
    pattern: /normalizeToBase64ForGeneration\(currentUrl\)/,
    message: 'image-task-handlers-core.ts must convert currentUrl to base64 before outbound',
  },
  {
    file: 'src/lib/workers/handlers/image-task-handlers-core.ts',
    pattern: /normalizeReferenceImagesForGeneration\(extraReferenceInputs\)/,
    message: 'image-task-handlers-core.ts must normalize extra references before outbound',
  },
  {
    file: 'src/lib/workers/video.worker.ts',
    pattern: /const\s+sourceImageBase64\s*=\s*await\s+normalizeToBase64ForGeneration\(sourceImageUrl\)/,
    message: 'video.worker.ts must normalize source frame to base64',
  },
  {
    file: 'src/lib/workers/video.worker.ts',
    pattern: /lastFrameImageBase64\s*=\s*await\s+normalizeToBase64ForGeneration\(lastFrameUrl\)/,
    message: 'video.worker.ts must normalize last frame to base64',
  },
  {
    file: 'src/app/api/novel-promotion/[projectId]/modify-asset-image/route.ts',
    pattern: /sanitizeImageInputsForTaskPayload/,
    message: 'modify-asset-image route must sanitize image inputs',
  },
  {
    file: 'src/app/api/novel-promotion/[projectId]/modify-storyboard-image/route.ts',
    pattern: /sanitizeImageInputsForTaskPayload/,
    message: 'modify-storyboard-image route must sanitize image inputs',
  },
  {
    file: 'src/app/api/asset-hub/modify-image/route.ts',
    pattern: /sanitizeImageInputsForTaskPayload/,
    message: 'asset-hub modify-image route must sanitize image inputs',
  },
  {
    file: 'src/components/ui/ImagePreviewModal.tsx',
    pattern: /import\s+\{\s*resolveOriginalImageUrl,\s*toDisplayImageUrl\s*\}\s+from\s+'@\/lib\/media\/image-url'/,
    message: 'ImagePreviewModal must use shared image-url helpers',
  },
  {
    file: 'src/lib/novel-promotion/stages/video-stage-runtime-core.tsx',
    pattern: /onPreviewImage=\{setPreviewImage\}/,
    message: 'Video stage runtime must wire preview callback to VideoPanelCard',
  },
  {
    file: 'src/app/[locale]/workspace/[projectId]/modes/novel-promotion/components/video/panel-card/types.ts',
    pattern: /onPreviewImage\?:\s*\(imageUrl:\s*string\)\s*=>\s*void/,
    message: 'VideoPanelCard runtime props must expose onPreviewImage',
  },
  {
    file: 'src/app/[locale]/workspace/[projectId]/modes/novel-promotion/components/video/panel-card/VideoPanelCardHeader.tsx',
    pattern: /className="absolute left-1\/2 top-1\/2 z-10 h-16 w-16 -translate-x-1\/2 -translate-y-1\/2 rounded-full"/,
    message: 'VideoPanelCard play trigger must be centered small button (preview/play separation)',
  },
]
// Patterns that must NOT appear: raw URLs bypassing normalization, silent
// fallbacks inside outbound-image.ts, and removed legacy aliases.
const mustNotIncludeRules: Rule[] = [
  {
    file: 'src/lib/workers/handlers/image-task-handlers-core.ts',
    pattern: /referenceImages:\s*\[currentUrl\]/,
    message: 'image-task-handlers-core.ts must not pass raw currentUrl directly as outbound reference',
  },
  {
    file: 'src/lib/workers/video.worker.ts',
    pattern: /imageUrl:\s*sourceImageUrl/,
    message: 'video.worker.ts must not pass raw sourceImageUrl to generator',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /return\s+await\s+toFetchableAbsoluteUrl\(mediaPath\)/,
    message: 'outbound-image.ts must not silently fallback when /m route cannot resolve storage key',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+imageUrlToBase64\s*\(/,
    message: 'outbound-image.ts must not keep legacy imageUrlToBase64 alias after phase 2 migration',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /return\s+await\s+toFetchableAbsoluteUrl\(unwrappedInput\)/,
    message: 'outbound-image.ts must not silently fallback unknown inputs to fetchable url',
  },
]
// Verifies structural invariants of the outbound-image pipeline: every
// mustIncludeRules pattern must be present and every mustNotIncludeRules
// pattern absent in its file. Exits 1 listing all violations on stderr.
function main() {
  const errors: string[] = []
  // Each file is read once and reused across rules.
  const contents = new Map<string, string>()
  const load = (file: string): string => {
    let text = contents.get(file)
    if (text === undefined) {
      text = readFile(file)
      contents.set(file, text)
    }
    return text
  }
  for (const rule of mustIncludeRules) {
    if (!rule.pattern.test(load(rule.file))) {
      errors.push(`${rule.file}: ${rule.message}`)
    }
  }
  for (const rule of mustNotIncludeRules) {
    if (rule.pattern.test(load(rule.file))) {
      errors.push(`${rule.file}: ${rule.message}`)
    }
  }
  if (errors.length > 0) {
    process.stderr.write('[check:outbound-image-unification] found violations:\n')
    for (const error of errors) {
      process.stderr.write(`- ${error}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(
    `[check:outbound-image-unification] ok include_checks=${mustIncludeRules.length} exclude_checks=${mustNotIncludeRules.length}\n`,
  )
}
main()
+293
View File
@@ -0,0 +1,293 @@
import { promises as fs } from 'node:fs'
import path from 'node:path'
const CATALOG_DIR = path.resolve(process.cwd(), 'standards/pricing')
const CAPABILITY_CATALOG_FILE = path.resolve(process.cwd(), 'standards/capabilities/image-video.catalog.json')
const API_TYPES = new Set(['text', 'image', 'video', 'voice', 'voice-design', 'lip-sync'])
const PRICING_MODES = new Set(['flat', 'capability'])
const TEXT_TOKEN_TYPES = new Set(['input', 'output'])
// True for plain, non-array objects (null/undefined excluded).
function isRecord(value) {
  if (value == null) return false
  return typeof value === 'object' && !Array.isArray(value)
}
// True for strings containing at least one non-whitespace character.
function isNonEmptyString(value) {
  return typeof value === 'string' ? value.trim().length > 0 : false
}
// Tier conditions may only compare against JSON primitives.
function isCapabilityValue(value) {
  const kind = typeof value
  return kind === 'string' || kind === 'number' || kind === 'boolean'
}
// True for real, finite numbers; Number.isFinite already rejects non-numbers
// (no coercion), NaN and the infinities.
function isFiniteNumber(value) {
  return Number.isFinite(value)
}
// Appends one structured validation issue to the accumulator.
function pushIssue(issues, file, index, field, message) {
  const issue = { file, index, field, message }
  issues.push(issue)
}
// 'vendor:variant' provider ids collapse to the vendor prefix; ids without a
// colon are returned unchanged.
function getProviderKey(providerId) {
  const [vendor] = providerId.split(':', 1)
  return vendor
}
// Composite lookup key for the capability option-field map.
function buildModelKey(modelType, provider, modelId) {
  return [modelType, provider, modelId].join('::')
}
// Lists every .json catalog file directly inside CATALOG_DIR (non-recursive).
async function listCatalogFiles() {
  const entries = await fs.readdir(CATALOG_DIR, { withFileTypes: true })
  const files = []
  for (const entry of entries) {
    if (!entry.isFile()) continue
    if (!entry.name.endsWith('.json')) continue
    files.push(path.join(CATALOG_DIR, entry.name))
  }
  return files
}
// Reads one pricing catalog; the top-level JSON value must be an array.
async function readCatalog(filePath) {
  const text = await fs.readFile(filePath, 'utf8')
  const data = JSON.parse(text)
  if (Array.isArray(data)) return data
  throw new Error(`catalog must be an array: ${filePath}`)
}
// Reads the shared image/video capability catalog (also a JSON array).
async function readCapabilityCatalog() {
  const text = await fs.readFile(CAPABILITY_CATALOG_FILE, 'utf8')
  const data = JSON.parse(text)
  if (Array.isArray(data)) return data
  throw new Error(`capability catalog must be an array: ${CAPABILITY_CATALOG_FILE}`)
}
// Collects the capability fields that have a non-empty '<field>Options' list
// declared under capabilities[modelType]; returns an empty Set otherwise.
function extractCapabilityOptionFields(modelType, capabilities) {
  const fields = new Set()
  if (!isRecord(capabilities)) return fields
  const namespace = capabilities[modelType]
  if (!isRecord(namespace)) return fields
  const suffix = 'Options'
  for (const [key, value] of Object.entries(namespace)) {
    const isOptionList = key.endsWith(suffix) && Array.isArray(value) && value.length > 0
    if (!isOptionList) continue
    fields.add(key.slice(0, key.length - suffix.length))
  }
  return fields
}
// Builds modelKey -> Set(optionField) for every capability entry, plus a
// provider-prefix fallback key ('vendor' from 'vendor:variant') that never
// overwrites an exact entry.
function buildCapabilityOptionFieldMap(capabilityEntries) {
  const map = new Map()
  const asTrimmed = (value) => (typeof value === 'string' ? value.trim() : '')
  for (const entry of capabilityEntries) {
    if (!isRecord(entry)) continue
    const modelType = asTrimmed(entry.modelType)
    const provider = asTrimmed(entry.provider)
    const modelId = asTrimmed(entry.modelId)
    if (!(modelType && provider && modelId)) continue
    const fields = extractCapabilityOptionFields(modelType, entry.capabilities)
    map.set(buildModelKey(modelType, provider, modelId), fields)
    const fallbackKey = buildModelKey(modelType, getProviderKey(provider), modelId)
    if (!map.has(fallbackKey)) {
      map.set(fallbackKey, fields)
    }
  }
  return map
}
// Validates one capability tier: `when` must be a non-empty object whose
// values are JSON primitives, and `amount` a non-negative finite number.
function validateTier(issues, file, index, tier, tierIndex) {
  if (!isRecord(tier)) {
    pushIssue(issues, file, index, `pricing.tiers[${tierIndex}]`, 'tier must be object')
    return
  }
  if (!isRecord(tier.when) || Object.keys(tier.when).length === 0) {
    pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when`, 'when must be non-empty object')
  } else {
    for (const [field, value] of Object.entries(tier.when)) {
      if (!isCapabilityValue(value)) {
        pushIssue(
          issues,
          file,
          index,
          `pricing.tiers[${tierIndex}].when.${field}`,
          'condition value must be string/number/boolean',
        )
      }
    }
  }
  if (!isFiniteNumber(tier.amount) || tier.amount < 0) {
    pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].amount`, 'amount must be finite number >= 0')
  }
}
// Text pricing must declare exactly one tier per tokenType ('input' and
// 'output'), each keyed only by tokenType, with no duplicates.
function validateTextCapabilityTiers(issues, file, index, tiers) {
  const seenTokenTypes = new Set()
  for (let tierIndex = 0; tierIndex < tiers.length; tierIndex += 1) {
    const tier = tiers[tierIndex]
    // Malformed tiers are already reported by validateTier; skip them here.
    if (!isRecord(tier) || !isRecord(tier.when)) continue
    const whenFields = Object.keys(tier.when)
    if (whenFields.length !== 1 || whenFields[0] !== 'tokenType') {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when`, 'text capability tier must only contain tokenType')
      continue
    }
    const tokenType = tier.when.tokenType
    if (typeof tokenType !== 'string' || !TEXT_TOKEN_TYPES.has(tokenType)) {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when.tokenType`, 'tokenType must be input or output')
      continue
    }
    if (seenTokenTypes.has(tokenType)) {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when.tokenType`, `duplicate tokenType tier: ${tokenType}`)
      continue
    }
    seenTokenTypes.add(tokenType)
  }
  // Both token types must be priced.
  for (const requiredTokenType of TEXT_TOKEN_TYPES) {
    if (!seenTokenTypes.has(requiredTokenType)) {
      pushIssue(issues, file, index, 'pricing.tiers', `missing text tier tokenType=${requiredTokenType}`)
    }
  }
}
// Image/video tiers may only condition on fields declared as '<field>Options'
// in the capability catalog for this model (exact provider id first, then the
// provider-prefix fallback key).
function validateMediaCapabilityTierFields(issues, file, index, item, tiers, capabilityOptionFieldsMap) {
  const modelType = item.apiType
  const provider = item.provider
  const modelId = item.modelId
  const modelKey = buildModelKey(modelType, provider, modelId)
  const fallbackKey = buildModelKey(modelType, getProviderKey(provider), modelId)
  const optionFields = capabilityOptionFieldsMap.get(modelKey) || capabilityOptionFieldsMap.get(fallbackKey)
  if (!optionFields || optionFields.size === 0) {
    pushIssue(issues, file, index, 'pricing.tiers', `no capability option fields found for ${modelType} ${provider}/${modelId}`)
    return
  }
  for (let tierIndex = 0; tierIndex < tiers.length; tierIndex += 1) {
    const tier = tiers[tierIndex]
    if (!isRecord(tier) || !isRecord(tier.when)) continue
    for (const field of Object.keys(tier.when)) {
      if (!optionFields.has(field)) {
        pushIssue(
          issues,
          file,
          index,
          `pricing.tiers[${tierIndex}].when.${field}`,
          `field ${field} is not declared in capabilities options for ${modelType} ${provider}/${modelId}`,
        )
      }
    }
  }
}
// Flags tiers whose `when` conditions duplicate an earlier tier; entries are
// key-sorted before serialization so the comparison is order-insensitive.
function validateDuplicateCapabilityTiers(issues, file, index, tiers) {
  const seen = new Set()
  for (let tierIndex = 0; tierIndex < tiers.length; tierIndex += 1) {
    const tier = tiers[tierIndex]
    if (!isRecord(tier) || !isRecord(tier.when)) continue
    const signature = JSON.stringify(Object.entries(tier.when).sort((left, right) => left[0].localeCompare(right[0])))
    if (seen.has(signature)) {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when`, 'duplicate capability tier condition')
      continue
    }
    seen.add(signature)
  }
}
// Dispatcher for one catalog entry's pricing block: 'flat' requires a valid
// flatAmount; 'capability' requires non-empty tiers plus the apiType-specific
// checks above (text token tiers, or media option-field validation).
function validatePricing(issues, file, index, item, capabilityOptionFieldsMap) {
  const pricing = item.pricing
  if (!isRecord(pricing)) {
    pushIssue(issues, file, index, 'pricing', 'pricing must be object')
    return
  }
  if (!isNonEmptyString(pricing.mode) || !PRICING_MODES.has(pricing.mode)) {
    pushIssue(issues, file, index, 'pricing.mode', 'pricing.mode must be flat or capability')
    return
  }
  if (pricing.mode === 'flat') {
    if (!isFiniteNumber(pricing.flatAmount) || pricing.flatAmount < 0) {
      pushIssue(issues, file, index, 'pricing.flatAmount', 'flatAmount must be finite number >= 0')
    }
    return
  }
  if (!Array.isArray(pricing.tiers) || pricing.tiers.length === 0) {
    pushIssue(issues, file, index, 'pricing.tiers', 'tiers must be non-empty array')
    return
  }
  for (let tierIndex = 0; tierIndex < pricing.tiers.length; tierIndex += 1) {
    validateTier(issues, file, index, pricing.tiers[tierIndex], tierIndex)
  }
  validateDuplicateCapabilityTiers(issues, file, index, pricing.tiers)
  if (item.apiType === 'text') {
    validateTextCapabilityTiers(issues, file, index, pricing.tiers)
    return
  }
  if (item.apiType === 'image' || item.apiType === 'video') {
    validateMediaCapabilityTierFields(issues, file, index, item, pricing.tiers, capabilityOptionFieldsMap)
  }
}
// Entry point: read every pricing catalog plus the capability catalog, then
// structurally validate each entry. Prints at most 50 issues and sets a
// non-zero exit code on failure.
async function main() {
  const issues = []
  const files = await listCatalogFiles()
  const capabilityCatalog = await readCapabilityCatalog()
  const capabilityOptionFieldsMap = buildCapabilityOptionFieldMap(capabilityCatalog)
  if (files.length === 0) {
    throw new Error(`no pricing files found in ${CATALOG_DIR}`)
  }
  for (const filePath of files) {
    const items = await readCatalog(filePath)
    for (let index = 0; index < items.length; index += 1) {
      const item = items[index]
      if (!isRecord(item)) {
        pushIssue(issues, filePath, index, 'entry', 'entry must be object')
        continue
      }
      if (!isNonEmptyString(item.apiType) || !API_TYPES.has(item.apiType)) {
        pushIssue(issues, filePath, index, 'apiType', 'apiType must be one of text/image/video/voice/voice-design/lip-sync')
      }
      if (!isNonEmptyString(item.provider)) {
        pushIssue(issues, filePath, index, 'provider', 'provider must be non-empty string')
      }
      if (!isNonEmptyString(item.modelId)) {
        pushIssue(issues, filePath, index, 'modelId', 'modelId must be non-empty string')
      }
      validatePricing(issues, filePath, index, item, capabilityOptionFieldsMap)
    }
  }
  if (issues.length === 0) {
    process.stdout.write(`[check-pricing-catalog] OK (${files.length} files)\n`)
    return
  }
  // Cap output so a badly broken catalog does not flood the terminal.
  const maxPrint = 50
  for (const issue of issues.slice(0, maxPrint)) {
    process.stdout.write(`[check-pricing-catalog] ${issue.file}#${issue.index} ${issue.field}: ${issue.message}\n`)
  }
  if (issues.length > maxPrint) {
    process.stdout.write(`[check-pricing-catalog] ... ${issues.length - maxPrint} more issues\n`)
  }
  process.exitCode = 1
}
main().catch((error) => {
  process.stderr.write(`[check-pricing-catalog] failed: ${String(error)}\n`)
  process.exitCode = 1
})
+197
View File
@@ -0,0 +1,197 @@
import { prisma } from '@/lib/prisma'
// Minimal projection of a character row needed for voice cleanup.
type CharacterVoiceRecord = {
  id: string
  customVoiceUrl: string | null
}
// Per-speaker voice config as stored (JSON) on an episode. Fields are
// `unknown` because the payload originates from persisted user state.
type SpeakerVoiceConfig = {
  voiceType?: unknown
  voiceId?: unknown
  audioUrl?: unknown
  [key: string]: unknown
}
// Counters reported as JSON at the end of a cleanup run.
type CleanupSummary = {
  projectCharactersUpdated: number
  globalCharactersUpdated: number
  episodeSpeakerVoicesUpdated: number
  episodeSpeakerVoicesCleared: number
  invalidSpeakerVoicesSkipped: number
}
// True when the value is a string with visible (non-whitespace) content.
function hasPlayableAudioUrl(value: unknown) {
  if (typeof value !== 'string') return false
  return value.trim() !== ''
}
// A character keeps voice type 'custom' only while it has playable audio;
// otherwise the voice type is cleared.
function normalizeVoiceType(customVoiceUrl: string | null) {
  if (hasPlayableAudioUrl(customVoiceUrl)) return 'custom'
  return null
}
// Rewrites each azure-voiced character row: voiceType becomes 'custom' when a
// playable custom audio URL exists (otherwise null) and voiceId is always
// cleared. Returns the number of rows updated.
async function cleanupCharacterTable(records: CharacterVoiceRecord[], table: 'project' | 'global') {
  let updated = 0
  for (const row of records) {
    // Same payload for both tables; only the model differs.
    const data = {
      voiceType: normalizeVoiceType(row.customVoiceUrl),
      voiceId: null,
    }
    const where = { id: row.id }
    if (table === 'project') {
      await prisma.novelPromotionCharacter.update({ where, data })
    } else {
      await prisma.globalCharacter.update({ where, data })
    }
    updated += 1
  }
  return updated
}
/**
 * Normalizes a stored speakerVoices JSON payload:
 * - azure entries with a playable audioUrl become voiceType 'custom' (voiceId cleared)
 * - azure entries without usable audio are dropped entirely
 * - all other entries are kept unchanged
 * Returns ok:false when the payload is not a JSON object of objects.
 * When every speaker was dropped, returns cleared:true with next:null so the
 * caller can null out the column. When nothing changed, `next` is the
 * original payload string (avoids a no-op re-serialization).
 */
function normalizeSpeakerVoices(payload: string): {
  ok: true
  changed: boolean
  cleared: boolean
  next: string | null
} | {
  ok: false
} {
  let parsed: unknown
  try {
    parsed = JSON.parse(payload)
  } catch {
    return { ok: false }
  }
  if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
    return { ok: false }
  }
  const source = parsed as Record<string, unknown>
  const next: Record<string, SpeakerVoiceConfig> = {}
  let changed = false
  for (const [speaker, value] of Object.entries(source)) {
    // Any non-object speaker entry invalidates the whole payload.
    if (!value || typeof value !== 'object' || Array.isArray(value)) {
      return { ok: false }
    }
    const config = { ...(value as SpeakerVoiceConfig) }
    if (config.voiceType === 'azure') {
      if (hasPlayableAudioUrl(config.audioUrl)) {
        config.voiceType = 'custom'
        config.voiceId = null
        next[speaker] = config
      } else {
        // No usable audio, drop stale azure speaker config.
      }
      // Either branch altered the payload (conversion or drop).
      changed = true
      continue
    }
    next[speaker] = config
  }
  const keys = Object.keys(next)
  if (keys.length === 0) {
    return {
      ok: true,
      changed,
      cleared: true,
      next: null,
    }
  }
  return {
    ok: true,
    changed,
    cleared: false,
    next: changed ? JSON.stringify(next) : payload,
  }
}
// Orchestrates the azure-voice cleanup: both character tables first, then
// each episode's per-speaker voice JSON; prints a JSON summary when done.
async function main() {
  const summary: CleanupSummary = {
    projectCharactersUpdated: 0,
    globalCharactersUpdated: 0,
    episodeSpeakerVoicesUpdated: 0,
    episodeSpeakerVoicesCleared: 0,
    invalidSpeakerVoicesSkipped: 0,
  }
  // The two character tables are independent — fetch them concurrently.
  const [projectCharacters, globalCharacters] = await Promise.all([
    prisma.novelPromotionCharacter.findMany({
      where: { voiceType: 'azure' },
      select: {
        id: true,
        customVoiceUrl: true,
      },
    }),
    prisma.globalCharacter.findMany({
      where: { voiceType: 'azure' },
      select: {
        id: true,
        customVoiceUrl: true,
      },
    }),
  ])
  summary.projectCharactersUpdated = await cleanupCharacterTable(projectCharacters, 'project')
  summary.globalCharactersUpdated = await cleanupCharacterTable(globalCharacters, 'global')
  const episodes = await prisma.novelPromotionEpisode.findMany({
    where: {
      speakerVoices: { not: null },
    },
    select: {
      id: true,
      speakerVoices: true,
    },
  })
  for (const row of episodes) {
    const speakerVoices = row.speakerVoices
    // Cheap substring pre-filter: skip episodes with no azure entries at all.
    if (!speakerVoices || !speakerVoices.includes('"voiceType":"azure"')) {
      continue
    }
    const normalized = normalizeSpeakerVoices(speakerVoices)
    if (!normalized.ok) {
      summary.invalidSpeakerVoicesSkipped += 1
      continue
    }
    if (!normalized.changed) {
      continue
    }
    await prisma.novelPromotionEpisode.update({
      where: { id: row.id },
      data: {
        speakerVoices: normalized.next,
      },
    })
    summary.episodeSpeakerVoicesUpdated += 1
    if (normalized.cleared) {
      summary.episodeSpeakerVoicesCleared += 1
    }
  }
  process.stdout.write(`${JSON.stringify({
    ok: true,
    checkedAt: new Date().toISOString(),
    summary,
  }, null, 2)}\n`)
}
main()
  .catch((error) => {
    process.stderr.write(`${error instanceof Error ? error.stack || error.message : String(error)}\n`)
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the prisma connection, success or failure.
    await prisma.$disconnect()
  })
+188
View File
@@ -0,0 +1,188 @@
/**
* 诊断项目任务状态
* 运行: npx tsx scripts/diagnose-project.ts <projectId>
*/
import { config } from 'dotenv'
config()
import { prisma } from '../src/lib/prisma'
/**
 * Dumps the full state of one project for debugging stuck tasks: project row,
 * novel-promotion config, location images (with MediaObject cross-check),
 * recent tasks and events, Redis/BullMQ reachability, and the user's model
 * preferences. Exits the process early when the project (or its
 * novel-promotion config) is missing.
 */
async function diagnoseProject(projectId: string) {
  console.log(`🔍 诊断项目: ${projectId}\n`)
  // 1. Does the project exist at all?
  console.log('1️⃣ 项目基本信息:')
  const project = await prisma.project.findUnique({
    where: { id: projectId },
    include: {
      novelPromotionData: true
    }
  })
  if (!project) {
    console.log(' ❌ 项目不存在')
    process.exit(1)
  }
  console.log(` 名称: ${project.name}`)
  console.log(` 模式: ${project.mode}`)
  console.log(` 用户ID: ${project.userId}`)
  // 2. Novel-promotion project configuration.
  console.log('\n2️⃣ 小说推广项目配置:')
  const novelData = project.novelPromotionData
  if (!novelData) {
    console.log(' ❌ novelPromotionData 未创建')
  } else {
    console.log(` ID: ${novelData.id}`)
    console.log(` 视频比例: ${novelData.videoRatio || '未设置'}`)
    console.log(` 画风提示: ${novelData.artStylePrompt || '未设置'}`)
  }
  // 3. Locations and their generated images.
  console.log('\n3️⃣ 场景资产:')
  const novelProjectId = novelData?.id
  if (!novelProjectId) {
    console.log(' ❌ 无法获取 novelPromotionProject ID')
    process.exit(1)
  }
  const locations = await prisma.novelPromotionLocation.findMany({
    where: { novelPromotionProjectId: novelProjectId },
    include: {
      images: {
        orderBy: { imageIndex: 'asc' }
      }
    }
  })
  console.log(` 场景数量: ${locations.length}`)
  for (const loc of locations) {
    console.log(`\n 📍 ${loc.name} (${loc.id})`)
    console.log(` 图片数量: ${loc.images?.length || 0}`)
    for (const img of loc.images || []) {
      console.log(` - [${img.imageIndex}] imageUrl: ${img.imageUrl || 'null'}`)
      console.log(` isSelected: ${img.isSelected}`)
      console.log(` description: ${img.description || 'null'}`)
      // Cross-check that a MediaObject row exists for this image URL.
      // NOTE(review): this findFirst runs once per image (N+1 queries) —
      // acceptable for a one-off diagnostic, avoid in production paths.
      if (img.imageUrl) {
        const media = await prisma.mediaObject.findFirst({
          where: {
            OR: [
              { storageKey: img.imageUrl },
              { storageKey: { contains: img.imageUrl.split('/').pop() || '' } }
            ]
          }
        })
        if (media) {
          console.log(` ✅ MediaObject: ${media.publicId}`)
        } else {
          console.log(` ⚠️ 未找到 MediaObject`)
        }
      }
    }
  }
  // 4. Most recent tasks for the project.
  console.log('\n4️⃣ 最近的任务:')
  const tasks = await prisma.task.findMany({
    where: { projectId },
    orderBy: { createdAt: 'desc' },
    take: 10
  })
  console.log(` 任务数量: ${tasks.length}`)
  for (const task of tasks) {
    console.log(`\n 📝 ${task.type} (${task.id})`)
    console.log(` 状态: ${task.status}`)
    console.log(` 目标: ${task.targetType} / ${task.targetId}`)
    console.log(` 创建时间: ${task.createdAt}`)
    console.log(` 更新时间: ${task.updatedAt}`)
    if (task.errorMessage || task.errorCode) {
      console.log(` ❌ 错误码: ${task.errorCode || 'N/A'}`)
      console.log(` ❌ 错误信息: ${task.errorMessage?.substring(0, 200) || 'N/A'}`)
    }
    // Latest events for the task (most recent first).
    const events = await prisma.taskEvent.findMany({
      where: { taskId: task.id },
      orderBy: { createdAt: 'desc' },
      take: 3
    })
    if (events.length > 0) {
      console.log(` 最近事件:`)
      for (const event of events) {
        console.log(` - ${event.eventType}: ${JSON.stringify(event.payload).substring(0, 100)}`)
      }
    }
  }
  // 5. Worker/queue configuration and Redis reachability.
  console.log('\n5️⃣ 检查 Worker 配置:')
  console.log(` REDIS_HOST: ${process.env.REDIS_HOST || '未设置'}`)
  console.log(` REDIS_PORT: ${process.env.REDIS_PORT || '未设置'}`)
  // Probe the Redis connection (lazy import keeps ioredis optional).
  try {
    const { Redis } = await import('ioredis')
    const redis = new Redis({
      host: process.env.REDIS_HOST || 'localhost',
      port: parseInt(process.env.REDIS_PORT || '6379'),
      maxRetriesPerRequest: 3,
      connectTimeout: 5000
    })
    const pingResult = await redis.ping()
    console.log(` ✅ Redis 连接: ${pingResult}`)
    // Enumerate BullMQ queues.
    // NOTE(review): KEYS is O(keyspace); fine for a diagnostic script only.
    const queueKeys = await redis.keys('bull:*:id')
    console.log(` BullMQ 队列数量: ${queueKeys.length}`)
    for (const key of queueKeys.slice(0, 5)) {
      const queueName = key.replace('bull:', '').replace(':id', '')
      // NOTE(review): jobCounts is fetched but never printed — dead read;
      // confirm intent before removing.
      const jobCounts = await redis.hgetall(`bull:${queueName}:id`)
      console.log(` - ${queueName}`)
    }
    redis.disconnect()
  } catch (error) {
    console.log(` ❌ Redis 连接失败:`, error)
  }
  // 6. User model preferences that drive generation.
  console.log('\n6️⃣ 检查用户模型配置:')
  const userPreference = await prisma.userPreference.findUnique({
    where: { userId: project.userId }
  })
  if (!userPreference) {
    console.log(' ❌ 用户偏好配置不存在')
  } else {
    console.log(` 角色模型: ${userPreference.characterModel || '未设置'}`)
    console.log(` 场景模型: ${userPreference.locationModel || '未设置'}`)
    console.log(` 视频模型: ${userPreference.videoModel || '未设置'}`)
    console.log(` 编辑模型: ${userPreference.editModel || '未设置'}`)
    console.log(` 口型同步模型: ${userPreference.lipSyncModel || '未设置'}`)
    console.log(` 分析模型: ${userPreference.analysisModel || '未设置'}`)
  }
  console.log('\n✨ 诊断完成!')
  await prisma.$disconnect()
}
// CLI argument handling: a projectId is required.
const projectId = process.argv[2]
if (!projectId) {
  console.log('用法: npx tsx scripts/diagnose-project.ts <projectId>')
  console.log('示例: npx tsx scripts/diagnose-project.ts fae709e9-9215-4b3f-9f53-dad871f09896')
  process.exit(1)
}
// Fix: the original `.catch(console.error)` printed the error but left the
// exit code at 0, so callers/CI saw a failed diagnosis as success.
diagnoseProject(projectId).catch((error) => {
  console.error(error)
  process.exitCode = 1
})
+116
View File
@@ -0,0 +1,116 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
import { pathToFileURL } from 'url'
const root = process.cwd()
const apiDir = path.join(root, 'src', 'app', 'api')
// Route files allowed to skip the apiHandler() wrapper.
export const API_HANDLER_ALLOWLIST = new Set([
  'src/app/api/auth/[...nextauth]/route.ts',
  'src/app/api/files/[...path]/route.ts',
  'src/app/api/system/boot-id/route.ts',
])
// Route files that are intentionally public — no auth helper required.
export const PUBLIC_ROUTE_ALLOWLIST = new Set([
  'src/app/api/auth/[...nextauth]/route.ts',
  'src/app/api/auth/register/route.ts',
  'src/app/api/cos/image/route.ts',
  'src/app/api/files/[...path]/route.ts',
  'src/app/api/storage/sign/route.ts',
  'src/app/api/system/boot-id/route.ts',
])
// Calling any one of these helpers satisfies the auth requirement.
const AUTH_CALL_PATTERNS = [
  /\brequireUserAuth\s*\(/,
  /\brequireProjectAuth\s*\(/,
  /\brequireProjectAuthLight\s*\(/,
]
// Prints a violation report to stderr and aborts with a non-zero exit code.
function fail(title, details = []) {
  process.stderr.write(`\n[api-route-contract-guard] ${title}\n`)
  for (const detail of details) {
    process.stderr.write(` - ${detail}\n`)
  }
  process.exit(1)
}
// Recursively collects route.ts files under `dir`, skipping VCS/build/deps
// directories; appends into `out` (mutated) and returns it.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const ignored = new Set(['.git', '.next', 'node_modules'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (ignored.has(entry.name)) continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
    } else if (entry.name === 'route.ts') {
      out.push(fullPath)
    }
  }
  return out
}
// Repo-relative, forward-slash path for stable report output.
function toRel(fullPath) {
  const rel = path.relative(root, fullPath)
  return rel.split(path.sep).join('/')
}
// True when the file references the apiHandler() wrapper anywhere.
function hasApiHandlerWrapper(content) {
  return /\bapiHandler\s*\(/.test(content)
}
// True when at least one approved auth helper is called.
function hasRequiredAuth(content) {
  for (const pattern of AUTH_CALL_PATTERNS) {
    if (pattern.test(content)) return true
  }
  return false
}
// Returns the contract violations for one route file (empty array = clean).
export function inspectRouteContract(relPath, content) {
  const violations = []
  const needsWrapper = !API_HANDLER_ALLOWLIST.has(relPath)
  if (needsWrapper && !hasApiHandlerWrapper(content)) {
    violations.push(`${relPath} missing apiHandler wrapper`)
  }
  const needsAuth = !PUBLIC_ROUTE_ALLOWLIST.has(relPath)
  if (needsAuth && !hasRequiredAuth(content)) {
    violations.push(`${relPath} missing requireUserAuth/requireProjectAuth/requireProjectAuthLight`)
  }
  return violations
}
// Scans an arbitrary root (defaults to cwd) and returns all violations.
export function findApiRouteContractViolations(scanRoot = root) {
  const routesRoot = path.join(scanRoot, 'src', 'app', 'api')
  return walk(routesRoot).flatMap((fullPath) => {
    const relPath = path.relative(scanRoot, fullPath).split(path.sep).join('/')
    return inspectRouteContract(relPath, fs.readFileSync(fullPath, 'utf8'))
  })
}
/**
 * CLI entry: scans src/app/api for route.ts files and fails the process when
 * any route violates the apiHandler/auth contract.
 */
export function main() {
  if (!fs.existsSync(apiDir)) {
    fail('Missing src/app/api directory')
  }
  // Fix: walk the tree once and reuse the list — the original called
  // walk(apiDir) a second time just to print the route count.
  const routeFiles = walk(apiDir)
  const violations = routeFiles
    .map((fullPath) => {
      const relPath = toRel(fullPath)
      const content = fs.readFileSync(fullPath, 'utf8')
      return inspectRouteContract(relPath, content)
    })
    .flat()
  if (violations.length > 0) {
    fail('Found API route contract violations', violations)
  }
  process.stdout.write(
    `[api-route-contract-guard] OK routes=${routeFiles.length} public=${PUBLIC_ROUTE_ALLOWLIST.size} apiHandlerExceptions=${API_HANDLER_ALLOWLIST.size}\n`,
  )
}
// Run only when executed directly (kept importable by tests).
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main()
}
@@ -0,0 +1,106 @@
#!/usr/bin/env node
import { execSync } from 'node:child_process'
import { pathToFileURL } from 'node:url'
// Source-to-test impact mapping: touching a file matching `source` requires
// at least one changed test file matching one of the rule's `tests` patterns.
const RULES = [
  {
    name: 'api',
    source: /^src\/app\/api\//,
    tests: [/^tests\/integration\/api\/contract\//, /^tests\/system\//, /^tests\/regression\//],
    message: 'changing src/app/api/** requires a matching contract, system, or regression test change',
  },
  {
    name: 'worker',
    source: /^src\/lib\/workers\//,
    tests: [/^tests\/unit\/worker\//, /^tests\/system\//, /^tests\/regression\//],
    message: 'changing src/lib/workers/** requires a matching worker, system, or regression test change',
  },
  {
    name: 'task',
    source: /^src\/lib\/task\//,
    tests: [/^tests\/unit\/task\//, /^tests\/system\//, /^tests\/regression\//],
    message: 'changing src/lib/task/** requires a matching task, system, or regression test change',
  },
  {
    name: 'media',
    source: /^src\/lib\/media\//,
    tests: [/^tests\/unit\//, /^tests\/system\//, /^tests\/regression\//],
    message: 'changing src/lib/media/** requires a matching unit, system, or regression test change',
  },
  {
    name: 'provider',
    source: /^src\/lib\/(generator-api|generators|model-gateway|lipsync|providers)\//,
    tests: [/^tests\/unit\/(providers|model-gateway|llm)\//, /^tests\/integration\/provider\//, /^tests\/system\//, /^tests\/regression\//],
    message: 'changing provider/gateway code requires provider contract, system, or regression test change',
  },
]
// Splits raw entries on newlines/commas, trims, and drops empty strings.
function normalizeChangedFiles(rawFiles) {
  const result = []
  for (const item of rawFiles) {
    for (const piece of item.split(/[\n,]/)) {
      const trimmed = piece.trim()
      if (trimmed) result.push(trimmed)
    }
  }
  return result
}
// Staged file list from git; any git failure (not a repo, no git binary)
// degrades to an empty list rather than breaking the guard.
function readGitChangedFiles() {
  try {
    const output = execSync('git diff --name-only --cached', {
      cwd: process.cwd(),
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'ignore'],
    })
    return normalizeChangedFiles([output])
  } catch {
    return []
  }
}
// Applies every rule to the changed-file set; returns one violation message
// per rule whose sources were touched without a matching test change.
export function inspectChangedFiles(changedFiles) {
  const changed = normalizeChangedFiles(changedFiles)
  const changedTests = changed.filter((file) => file.startsWith('tests/'))
  const violations = []
  for (const rule of RULES) {
    const impactedSources = changed.filter((file) => rule.source.test(file))
    if (impactedSources.length === 0) continue
    const testTouched = rule.tests.some((pattern) => changedTests.some((file) => pattern.test(file)))
    if (!testTouched) {
      violations.push(`${rule.name}: ${rule.message}; sources=${impactedSources.join(',')}`)
    }
  }
  return violations
}
// Prints the violations and aborts the commit/CI step.
function fail(violations) {
  console.error('\n[changed-file-test-impact-guard] Missing matching test changes')
  for (const violation of violations) {
    console.error(` - ${violation}`)
  }
  process.exit(1)
}
// CLI entry: changed files come from argv when given, otherwise from the
// TEST_IMPACT_CHANGED_FILES env var merged with staged git files. No changed
// files at all is a skip (exit 0), not a failure.
function runCli() {
  const inputFiles = process.argv.slice(2)
  const changedFiles = inputFiles.length > 0
    ? normalizeChangedFiles(inputFiles)
    : normalizeChangedFiles([process.env.TEST_IMPACT_CHANGED_FILES || '', ...readGitChangedFiles()])
  if (changedFiles.length === 0) {
    console.log('[changed-file-test-impact-guard] SKIP no changed files detected')
    process.exit(0)
  }
  const violations = inspectChangedFiles(changedFiles)
  if (violations.length > 0) {
    fail(violations)
  }
  console.log(`[changed-file-test-impact-guard] OK files=${changedFiles.length}`)
}
// Run only when executed directly (keeps the module importable by tests).
const entryHref = process.argv[1] ? pathToFileURL(process.argv[1]).href : null
if (entryHref && import.meta.url === entryHref) {
  runCli()
}
+89
View File
@@ -0,0 +1,89 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
const ROOT = process.cwd()
// Line-count budgets per file category; `include` filters files under `dir`
// by their repo-relative path.
const RULES = [
  {
    label: 'component',
    dir: 'src',
    include: (relPath) =>
      relPath.includes('/components/')
      && /\.(ts|tsx)$/.test(relPath),
    limit: 500,
  },
  {
    label: 'hook',
    dir: 'src',
    // Matches files in a hooks/ directory or named like useSomething.ts(x).
    include: (relPath) =>
      (relPath.includes('/hooks/') || /\/use[A-Z].+\.(ts|tsx)$/.test(relPath))
      && /\.(ts|tsx)$/.test(relPath),
    limit: 400,
  },
  {
    label: 'worker-handler',
    dir: 'src/lib/workers/handlers',
    include: (relPath) => /\.(ts|tsx)$/.test(relPath),
    limit: 300,
  },
  {
    label: 'mutation',
    dir: 'src/lib/query/mutations',
    // Barrel files (index.ts) are exempt from the mutation budget.
    include: (relPath) => /\.(ts|tsx)$/.test(relPath) && !relPath.endsWith('/index.ts'),
    limit: 300,
  },
]
// Recursively lists all files under `absDir`; each result carries both the
// absolute path and a forward-slash path relative to `relBase`.
const walkFiles = (absDir, relBase = '') => {
  if (!fs.existsSync(absDir)) return []
  const collected = []
  for (const entry of fs.readdirSync(absDir, { withFileTypes: true })) {
    const abs = path.join(absDir, entry.name)
    const rel = path.join(relBase, entry.name).replace(/\\/g, '/')
    if (entry.isDirectory()) {
      collected.push(...walkFiles(abs, rel))
    } else {
      collected.push({ absPath: abs, relPath: rel })
    }
  }
  return collected
}
// Counts the lines in a file.
// Fix: the original `split('\n').length` counted a trailing newline as an
// extra empty line ("a\n" -> 2), silently tightening every budget by one for
// conventionally newline-terminated files.
const countLines = (absPath) => {
  const raw = fs.readFileSync(absPath, 'utf8')
  if (raw.length === 0) return 0
  const segments = raw.split('\n')
  // Drop the phantom segment produced by a terminating newline.
  if (segments[segments.length - 1] === '') segments.pop()
  return segments.length
}
// Script body: apply every rule to its directory tree and collect the files
// that exceed their line budget.
const violations = []
for (const rule of RULES) {
  const absDir = path.join(ROOT, rule.dir)
  const files = walkFiles(absDir, rule.dir).filter((f) => rule.include(f.relPath))
  for (const file of files) {
    const lineCount = countLines(file.absPath)
    if (lineCount > rule.limit) {
      violations.push({
        label: rule.label,
        relPath: file.relPath,
        lineCount,
        limit: rule.limit,
      })
    }
  }
}
if (violations.length === 0) {
  process.stdout.write('[file-line-count-guard] PASS\n')
  process.exit(0)
}
// Report every oversized file, then fail the build.
process.stderr.write('[file-line-count-guard] FAIL: file size budget exceeded\n')
for (const violation of violations) {
  process.stderr.write(
    `- [${violation.label}] ${violation.relPath}: ${violation.lineCount} > ${violation.limit}\n`,
  )
}
process.exit(1)
@@ -0,0 +1,102 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
import { pathToFileURL } from 'url'
const root = process.cwd()
const handlersDir = path.join(root, 'src', 'lib', 'workers', 'handlers')
// Files allowed to combine resolveImageSourceFromGeneration with raw
// referenceImages — they implement the normalization helpers themselves.
export const NORMALIZATION_HELPER_ALLOWLIST = new Set([
  'src/lib/workers/handlers/image-task-handler-shared.ts',
])
// Referencing any of these calls counts as proper reference normalization.
const ACCEPTED_NORMALIZATION_MARKERS = [
  /\bnormalizeReferenceImagesForGeneration\s*\(/,
  /\bnormalizeToBase64ForGeneration\s*\(/,
  /\bgenerateProjectLabeledImageToStorage\s*\(/,
  /\bgenerateCleanImageToStorage\s*\(/,
]
// Prints a violation report to stderr and aborts with a non-zero exit code.
function fail(title, details = []) {
  process.stderr.write(`\n[image-reference-normalization-guard] ${title}\n`)
  for (const detail of details) {
    process.stderr.write(` - ${detail}\n`)
  }
  process.exit(1)
}
// Recursively collects .ts files under `dir`, skipping VCS/build/dependency
// directories; appends into `out` (mutated) and returns it.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const ignored = new Set(['.git', '.next', 'node_modules'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (ignored.has(entry.name)) continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
    } else if (entry.name.endsWith('.ts')) {
      out.push(fullPath)
    }
  }
  return out
}
// Repo-relative, forward-slash path for stable report output.
function toRel(fullPath) {
  const rel = path.relative(root, fullPath)
  return rel.split(path.sep).join('/')
}
// A handler "uses generation reference images" when it both resolves an image
// source from a generation and passes a referenceImages option.
function usesGenerationReferenceImages(content) {
  const resolvesSource = /\bresolveImageSourceFromGeneration\s*\(/.test(content)
  const passesReferences = /\breferenceImages\s*:/.test(content)
  return resolvesSource && passesReferences
}
// True when any accepted normalization helper is referenced.
function hasNormalizationMarker(content) {
  for (const pattern of ACCEPTED_NORMALIZATION_MARKERS) {
    if (pattern.test(content)) return true
  }
  return false
}
// Returns [] when the file is allowlisted, does not use generation reference
// images, or normalizes them; otherwise a single violation message.
export function inspectImageReferenceNormalization(relPath, content) {
  if (NORMALIZATION_HELPER_ALLOWLIST.has(relPath)) return []
  if (!usesGenerationReferenceImages(content)) return []
  if (hasNormalizationMarker(content)) return []
  return [
    `${relPath} uses resolveImageSourceFromGeneration with referenceImages but does not reference normalizeReferenceImagesForGeneration/normalizeToBase64ForGeneration/generateProjectLabeledImageToStorage/generateCleanImageToStorage`,
  ]
}
// Library entry: scan an arbitrary root (used by tests) and return all
// violations found under its workers/handlers tree.
export function findImageReferenceNormalizationViolations(scanRoot = root) {
  const scanDir = path.join(scanRoot, 'src', 'lib', 'workers', 'handlers')
  return walk(scanDir)
    .map((fullPath) => {
      const relPath = path.relative(scanRoot, fullPath).split(path.sep).join('/')
      const content = fs.readFileSync(fullPath, 'utf8')
      return inspectImageReferenceNormalization(relPath, content)
    })
    .flat()
}
// CLI entry: scan src/lib/workers/handlers and exit non-zero on violations.
export function main() {
  if (!fs.existsSync(handlersDir)) {
    fail('Missing src/lib/workers/handlers directory')
  }
  const handlerFiles = walk(handlersDir)
  const violations = handlerFiles
    .map((fullPath) => {
      const relPath = toRel(fullPath)
      const content = fs.readFileSync(fullPath, 'utf8')
      return inspectImageReferenceNormalization(relPath, content)
    })
    .flat()
  if (violations.length > 0) {
    fail('Found image reference normalization violations', violations)
  }
  process.stdout.write(
    `[image-reference-normalization-guard] OK handlers=${handlerFiles.length} allowlist=${NORMALIZATION_HELPER_ALLOWLIST.size}\n`,
  )
}
// Run only when executed directly (keeps the module importable by tests).
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main()
}
+101
View File
@@ -0,0 +1,101 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
// File extensions considered source code for the scan.
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
// Directory trees scanned for locale-unsafe navigation.
const scanDirectories = [
  'src/app/[locale]',
]
// Files outside the scan roots that also render locale navigation.
const extraFiles = [
  'src/components/Navbar.tsx',
  'src/components/LanguageSwitcher.tsx',
]
// Repo-relative, forward-slash path for report lines.
function toRel(fullPath) {
  const rel = path.relative(root, fullPath)
  return rel.split(path.sep).join('/')
}
// Recursively collects source files (by extension) under `dir`, skipping
// VCS/build/dependency directories; appends into `out` and returns it.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const ignored = new Set(['.git', '.next', 'node_modules'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (ignored.has(entry.name)) continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
    } else if (sourceExtensions.has(path.extname(fullPath))) {
      out.push(fullPath)
    }
  }
  return out
}
// Union of all files from the scan roots plus the explicitly listed extras,
// deduplicated in case a file is reachable via both paths.
function gatherTargetFiles() {
  const collected = scanDirectories.flatMap((dir) => walk(path.join(root, dir)))
  for (const relPath of extraFiles) {
    const fullPath = path.join(root, relPath)
    if (fs.existsSync(fullPath)) collected.push(fullPath)
  }
  return [...new Set(collected)]
}
// Scans one file's content for locale-unsafe navigation: direct next/link or
// next/navigation useRouter imports (file-level) and root-literal href /
// router calls (line-level). Returns "<relPath>:<line> <message>" strings.
function findViolations(content, relPath) {
  const issues = []
  const patterns = {
    nextLinkImport: /from\s+['"]next\/link['"]/,
    useRouterImport: /import\s*{[\s\S]*?\buseRouter\b[\s\S]*?}\s*from\s*['"]next\/navigation['"]/m,
    rootHrefLiteral: /\bhref\s*=\s*["']\//,
    rootHrefTemplate: /\bhref\s*=\s*{`\//,
    rootRouterCall: /\brouter\.(push|replace|prefetch)\s*\(\s*["'`]\//,
  }
  const lineNumberAt = (offset) => content.slice(0, offset).split('\n').length
  const linkImportAt = content.search(patterns.nextLinkImport)
  if (linkImportAt >= 0) {
    issues.push(`${relPath}:${lineNumberAt(linkImportAt)} do not import next/link in locale navigation surface; use @/i18n/navigation Link`)
  }
  const routerImportAt = content.search(patterns.useRouterImport)
  if (routerImportAt >= 0) {
    issues.push(`${relPath}:${lineNumberAt(routerImportAt)} do not import useRouter from next/navigation in locale navigation surface; use @/i18n/navigation useRouter`)
  }
  content.split('\n').forEach((line, index) => {
    const lineNo = index + 1
    if (patterns.rootHrefLiteral.test(line) || patterns.rootHrefTemplate.test(line)) {
      issues.push(`${relPath}:${lineNo} do not use root-literal href; use Link href={{ pathname: '...' }} via @/i18n/navigation`)
    }
    if (patterns.rootRouterCall.test(line)) {
      issues.push(`${relPath}:${lineNo} do not use root-literal router navigation; use router.push/replace({ pathname: '...' }) via @/i18n/navigation`)
    }
  })
  return issues
}
// Scan every target file and aggregate all violations before reporting,
// so a single run surfaces the full list instead of failing on the first hit.
const violations = []
for (const filePath of gatherTargetFiles()) {
  const content = fs.readFileSync(filePath, 'utf8')
  violations.push(...findViolations(content, toRel(filePath)))
}
if (violations.length > 0) {
  console.error('\n[locale-navigation-guard] violations found:')
  for (const violation of violations) {
    console.error(` - ${violation}`)
  }
  // Non-zero exit fails the CI step that invokes this guard.
  process.exit(1)
}
console.log('[locale-navigation-guard] OK')
+77
View File
@@ -0,0 +1,77 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const scanRoots = ['src/app/api', 'src/pages/api']
const allowedPrefixes = []
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
function fail(title, details = []) {
console.error(`\n[no-api-direct-llm-call] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
const ext = path.extname(entry.name)
if (sourceExtensions.has(ext)) {
out.push(fullPath)
}
}
return out
}
function isAllowedFile(relPath) {
return allowedPrefixes.some((prefix) => relPath.startsWith(prefix))
}
function collectViolations(fullPath) {
const relPath = toRel(fullPath)
if (isAllowedFile(relPath)) return []
const content = fs.readFileSync(fullPath, 'utf8')
const lines = content.split('\n')
const violations = []
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i]
if (/from\s+['"]@\/lib\/llm-client['"]/.test(line)) {
violations.push(`${relPath}:${i + 1} forbidden import from '@/lib/llm-client'`)
}
if (/\bchatCompletion[A-Za-z0-9_]*\s*\(/.test(line)) {
violations.push(`${relPath}:${i + 1} forbidden direct chatCompletion* call`)
}
if (/\bisInternalTaskExecution\b/.test(line)) {
violations.push(`${relPath}:${i + 1} forbidden dual-track fallback marker isInternalTaskExecution`)
}
}
return violations
}
const allFiles = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = allFiles.flatMap((fullPath) => collectViolations(fullPath))
if (violations.length > 0) {
fail('Found forbidden direct LLM execution in production API routes', violations)
}
console.log('[no-api-direct-llm-call] OK')
@@ -0,0 +1,45 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
const ROOT = process.cwd()
const API_ROOT = path.join(ROOT, 'src', 'app', 'api')
// Known groups of API route files that implement the same endpoint; the guard
// fails when more than one candidate of a group exists at the same time.
const KNOWN_DUPLICATE_GROUPS = [
  {
    key: 'user-llm-test-connection',
    candidates: [
      'src/app/api/user/api-config/test-connection/route.ts',
      'src/app/api/user/test-llm-provider/route.ts',
    ],
  },
]
// True when the repo-relative path exists on disk.
const exists = (relPath) => fs.existsSync(path.join(ROOT, relPath))
const failures = []
for (const group of KNOWN_DUPLICATE_GROUPS) {
  const present = group.candidates.filter(exists)
  // A single surviving candidate is fine; two or more is a duplicate entry.
  if (present.length > 1) {
    failures.push({ key: group.key, present })
  }
}
// If the API directory is absent entirely, there is nothing to check.
// NOTE(review): this early-exit runs after the duplicate scan above; that is
// harmless (no candidates can exist without the api dir) but the check could
// equally sit first.
if (!fs.existsSync(API_ROOT)) {
  process.stdout.write('[no-duplicate-endpoint-entry] PASS (api dir missing)\n')
  process.exit(0)
}
if (failures.length === 0) {
  process.stdout.write('[no-duplicate-endpoint-entry] PASS\n')
  process.exit(0)
}
process.stderr.write('[no-duplicate-endpoint-entry] FAIL: duplicated endpoint entry detected\n')
for (const failure of failures) {
  process.stderr.write(`- ${failure.key}\n`)
  for (const relPath of failure.present) {
    process.stderr.write(`  - ${relPath}\n`)
  }
}
process.exit(1)
@@ -0,0 +1,73 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src']
const allowConstantDefinitionsIn = new Set([
'src/lib/constants.ts',
])
const forbiddenCapabilityConstants = [
'VIDEO_MODELS',
'FIRST_LAST_FRAME_MODELS',
'AUDIO_SUPPORTED_MODELS',
'BANANA_MODELS',
'BANANA_RESOLUTION_OPTIONS',
]
function fail(title, details = []) {
console.error(`\n[no-hardcoded-model-capabilities] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
if (sourceExtensions.has(path.extname(entry.name))) {
out.push(fullPath)
}
}
return out
}
// Pre-compile one word-boundary regex per forbidden constant. The original
// built a fresh RegExp for every (line, token) pair inside the scan loop,
// which is pure per-iteration overhead; the patterns never change.
const tokenPatterns = forbiddenCapabilityConstants.map((token) => ({
  token,
  pattern: new RegExp(`\\b${token}\\b`),
}))
const files = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = []
for (const fullPath of files) {
  const relPath = toRel(fullPath)
  // Files explicitly allowed to define the capability constants are skipped.
  if (allowConstantDefinitionsIn.has(relPath)) continue
  const lines = fs.readFileSync(fullPath, 'utf8').split('\n')
  for (let index = 0; index < lines.length; index += 1) {
    const line = lines[index]
    // Token order matches forbiddenCapabilityConstants, so the report order
    // is identical to the previous per-line RegExp construction.
    for (const { token, pattern } of tokenPatterns) {
      if (pattern.test(line)) {
        violations.push(`${relPath}:${index + 1} forbidden hardcoded model capability token ${token}`)
      }
    }
  }
}
if (violations.length > 0) {
  fail('Found hardcoded model capability usage', violations)
}
console.log('[no-hardcoded-model-capabilities] OK')
@@ -0,0 +1,77 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const scanRoots = ['src/app/api', 'src/pages/api']
const allowedPrefixes = ['src/app/api/ui-review/', 'src/pages/api/ui-review/']
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
function fail(title, details = []) {
console.error(`\n[no-internal-task-sync-fallback] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
if (sourceExtensions.has(path.extname(entry.name))) {
out.push(fullPath)
}
}
return out
}
function isAllowedFile(relPath) {
return allowedPrefixes.some((prefix) => relPath.startsWith(prefix))
}
/**
 * Collect sync-fallback violations for one API route file.
 *
 * Line-level rules flag the dual-track marker `isInternalTaskExecution` and
 * the sync-branch helper `shouldRunSyncTask`. A file-level rule requires any
 * file calling `maybeSubmitLLMTask(...)` to contain the literal sentence
 * 'sync mode is disabled for this route' as an explicit guard marker.
 *
 * @param {string} fullPath - absolute file path
 * @returns {string[]} violation messages ('relPath:line ...' or 'relPath ...')
 */
function collectViolations(fullPath) {
  const relPath = toRel(fullPath)
  // Allow-listed prefixes (e.g. ui-review routes) are exempt from all rules.
  if (isAllowedFile(relPath)) return []
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  const violations = []
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i]
    if (/\bisInternalTaskExecution\b/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden dual-track fallback marker isInternalTaskExecution`)
    }
    if (/\bshouldRunSyncTask\s*\(/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden sync-mode branch helper shouldRunSyncTask`)
    }
  }
  // File-level check: the guard sentence may appear anywhere in the file,
  // so this is tested against the whole content rather than per line.
  if (/\bmaybeSubmitLLMTask\s*\(/.test(content) && !/sync mode is disabled for this route/.test(content)) {
    violations.push(`${relPath} missing explicit sync-disabled guard after maybeSubmitLLMTask`)
  }
  return violations
}
const allFiles = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = allFiles.flatMap((fullPath) => collectViolations(fullPath))
if (violations.length > 0) {
fail('Found potential sync fallback or dual-track task branch in production API routes', violations)
}
console.log('[no-internal-task-sync-fallback] OK')
@@ -0,0 +1,88 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const allowFactoryImportIn = new Set([
'src/lib/generator-api.ts',
'src/lib/generators/factory.ts',
])
function fail(title, details = []) {
console.error(`\n[no-media-provider-bypass] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
if (sourceExtensions.has(path.extname(entry.name))) {
out.push(fullPath)
}
}
return out
}
const generatorApiPath = path.join(root, 'src/lib/generator-api.ts')
if (!fs.existsSync(generatorApiPath)) {
fail('Missing src/lib/generator-api.ts')
}
const generatorApiContent = fs.readFileSync(generatorApiPath, 'utf8')
const resolveModelSelectionHits = (generatorApiContent.match(/resolveModelSelection\s*\(/g) || []).length
if (resolveModelSelectionHits < 2) {
fail('generator-api must route both image and video generation through resolveModelSelection', [
'expected >= 2 resolveModelSelection(...) calls in src/lib/generator-api.ts',
])
}
const allFiles = walk(path.join(root, 'src'))
const violations = []
for (const fullPath of allFiles) {
const relPath = toRel(fullPath)
const content = fs.readFileSync(fullPath, 'utf8')
const lines = content.split('\n')
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i]
if (
relPath !== 'src/lib/generators/factory.ts' &&
(/\bcreateImageGeneratorByModel\s*\(/.test(line) || /\bcreateVideoGeneratorByModel\s*\(/.test(line))
) {
violations.push(`${relPath}:${i + 1} forbidden provider-bypass factory call create*GeneratorByModel(...)`)
}
if ((/\bgetImageApiKey\s*\(/.test(line) || /\bgetVideoApiKey\s*\(/.test(line)) && relPath !== 'src/lib/api-config.ts') {
violations.push(`${relPath}:${i + 1} forbidden direct getImageApiKey/getVideoApiKey usage outside api-config`)
}
if (/from\s+['"]@\/lib\/generators\/factory['"]/.test(line) && !allowFactoryImportIn.has(relPath)) {
violations.push(`${relPath}:${i + 1} forbidden direct import from '@/lib/generators/factory' (must go through generator-api)`)
}
}
}
if (violations.length > 0) {
fail('Found media provider routing bypass', violations)
}
console.log('[no-media-provider-bypass] OK')
+93
View File
@@ -0,0 +1,93 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src/app', 'src/lib']
const modelFields = [
'analysisModel',
'characterModel',
'locationModel',
'storyboardModel',
'editModel',
'videoModel',
]
function fail(title, details = []) {
console.error(`\n[no-model-key-downgrade] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
if (sourceExtensions.has(path.extname(entry.name))) {
out.push(fullPath)
}
}
return out
}
/**
 * Collect model-key downgrade violations for one file.
 *
 * Flags (1) a default-model field (analysisModel, videoModel, ...) being
 * assigned an expression that mentions `modelId` on the same line — these
 * fields must persist the composite model_key instead — and (2) JSX option
 * values bound to `model.modelId` instead of the model key.
 *
 * NOTE(review): both rules are single-line heuristics; an assignment split
 * across lines would not be detected.
 *
 * @param {string} filePath - absolute file path
 * @returns {string[]} violation messages prefixed `relPath:line`
 */
function collectViolations(filePath) {
  const relPath = toRel(filePath)
  const lines = fs.readFileSync(filePath, 'utf8').split('\n')
  const violations = []
  // Built once per file: matches e.g. `videoModel: something.modelId` where
  // the value expression (up to a comma/newline) contains `modelId`.
  const modelFieldPattern = new RegExp(`\\b(${modelFields.join('|')})\\s*:\\s*[^,\\n]*\\bmodelId\\b`)
  const optionModelIdPattern = /value=\{model\.modelId\}/
  for (let index = 0; index < lines.length; index += 1) {
    const line = lines[index]
    if (modelFieldPattern.test(line)) {
      violations.push(`${relPath}:${index + 1} default model field must persist model_key, not modelId`)
    }
    if (optionModelIdPattern.test(line)) {
      violations.push(`${relPath}:${index + 1} UI option value must use modelKey, not model.modelId`)
    }
  }
  return violations
}
function assertFileContains(relativePath, requiredSnippets) {
const fullPath = path.join(root, relativePath)
if (!fs.existsSync(fullPath)) {
fail('Missing required contract file', [relativePath])
}
const content = fs.readFileSync(fullPath, 'utf8')
const missing = requiredSnippets.filter((snippet) => !content.includes(snippet))
if (missing.length > 0) {
fail('Model key contract anchor missing', missing.map((snippet) => `${relativePath} missing: ${snippet}`))
}
}
const files = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = files.flatMap((filePath) => collectViolations(filePath))
assertFileContains('src/lib/model-config-contract.ts', ['parseModelKeyStrict', 'markerIndex === -1) return null'])
assertFileContains('src/lib/config-service.ts', ['parseModelKeyStrict'])
assertFileContains('src/app/api/user/api-config/route.ts', ['validateDefaultModelKey', 'must be provider::modelId'])
assertFileContains('src/app/api/novel-promotion/[projectId]/route.ts', ['must be provider::modelId'])
if (violations.length > 0) {
fail('Found model key downgrade pattern', violations)
}
console.log('[no-model-key-downgrade] OK')
@@ -0,0 +1,109 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const lineScanRoots = [
'src/app/[locale]/workspace/[projectId]/modes/novel-promotion',
'src/lib/query/hooks',
]
const fileScanRoots = [
'src/app/api/novel-promotion',
'src/lib/workers/handlers',
]
const lineRules = [
{
name: 'shadow state localStoryboards',
test: (line) => /const\s*\[\s*localStoryboards\s*,\s*setLocalStoryboards\s*\]\s*=\s*useState/.test(line),
},
{
name: 'shadow state localVoiceLines',
test: (line) => /const\s*\[\s*localVoiceLines\s*,\s*setLocalVoiceLines\s*\]\s*=\s*useState/.test(line),
},
{
name: 'hardcoded queryKey array',
test: (line) => /queryKey\s*:\s*\[/.test(line),
},
]
function fail(title, details = []) {
console.error(`\n[no-multiple-sources-of-truth] ${title}`)
for (const detail of details) {
console.error(` - ${detail}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
if (sourceExtensions.has(path.extname(entry.name))) out.push(fullPath)
}
return out
}
function collectLineViolations(fullPath) {
const relPath = toRel(fullPath)
const content = fs.readFileSync(fullPath, 'utf8')
const lines = content.split('\n')
const violations = []
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i]
for (const rule of lineRules) {
if (rule.test(line)) {
violations.push(`${relPath}:${i + 1} forbidden: ${rule.name}`)
}
}
}
return violations
}
function collectFileViolations(fullPath) {
const relPath = toRel(fullPath)
const content = fs.readFileSync(fullPath, 'utf8')
const violations = []
const updateCallRegex = /novelPromotionProject\.update\(\{[\s\S]*?\n\s*\}\)/g
for (const match of content.matchAll(updateCallRegex)) {
const block = match[0]
const hasStageWrite = /\bdata\s*:\s*\{[\s\S]*?\bstage\s*:/.test(block)
if (!hasStageWrite) continue
const before = content.slice(0, match.index ?? 0)
const lineNumber = before.split('\n').length
violations.push(`${relPath}:${lineNumber} forbidden: DB stage write in novelPromotionProject.update`)
}
return violations
}
const lineFiles = lineScanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const fileFiles = fileScanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const lineViolations = lineFiles.flatMap((fullPath) => collectLineViolations(fullPath))
const fileViolations = fileFiles.flatMap((fullPath) => collectFileViolations(fullPath))
const allViolations = [...lineViolations, ...fileViolations]
if (allViolations.length > 0) {
fail('Found multiple-sources-of-truth regressions', allViolations)
}
console.log('[no-multiple-sources-of-truth] OK')
+95
View File
@@ -0,0 +1,95 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src/lib', 'src/app/api']
const allowModelRegistryUsage = new Set()
function fail(title, details = []) {
console.error(`\n[no-provider-guessing] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
if (sourceExtensions.has(path.extname(entry.name))) {
out.push(fullPath)
}
}
return out
}
const apiConfigPath = path.join(root, 'src/lib/api-config.ts')
if (!fs.existsSync(apiConfigPath)) {
fail('Missing src/lib/api-config.ts')
}
const legacyRegistryPath = path.join(root, 'src/lib/model-registry.ts')
if (fs.existsSync(legacyRegistryPath)) {
fail('Legacy runtime registry must be removed', ['src/lib/model-registry.ts'])
}
const apiConfigText = fs.readFileSync(apiConfigPath, 'utf8')
// Tokens whose presence in api-config.ts indicates the old provider-guessing
// behavior (cross-media-type candidate matching) has crept back in.
const forbiddenApiConfigTokens = [
  'includeAnyType',
  'crossTypeCandidates',
  'matches multiple providers across media types',
]
const apiViolations = forbiddenApiConfigTokens
  .filter((token) => apiConfigText.includes(token))
  .map((token) => `src/lib/api-config.ts contains forbidden provider-guessing token: ${token}`)
// Verify api-config.ts resolves providers via strict, exact provider.id
// matching (no filtering by media type, no fuzzy providerKey matching).
if (!apiConfigText.includes('pickProviderStrict(')) {
  apiViolations.push('src/lib/api-config.ts missing strict provider resolution function (pickProviderStrict)')
}
const files = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = [...apiViolations]
for (const fullPath of files) {
  const relPath = toRel(fullPath)
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  for (let index = 0; index < lines.length; index += 1) {
    const line = lines[index]
    // Importing the legacy model-registry module is only tolerated for paths
    // in allowModelRegistryUsage (currently empty, i.e. never allowed).
    if (
      /from\s+['"]@\/lib\/model-registry['"]/.test(line)
      && !allowModelRegistryUsage.has(relPath)
    ) {
      violations.push(`${relPath}:${index + 1} forbidden model-registry import outside allowed boundary`)
    }
    // Runtime registry lookups are likewise forbidden outside the allow set.
    if (
      (/\bgetModelRegistryEntry\s*\(/.test(line) || /\blistRegisteredModels\s*\(/.test(line))
      && !allowModelRegistryUsage.has(relPath)
    ) {
      violations.push(`${relPath}:${index + 1} forbidden model-registry runtime mapping usage`)
    }
  }
}
if (violations.length > 0) {
  fail('Found provider guessing / registry mapping violation', violations)
}
console.log('[no-provider-guessing] OK')
+81
View File
@@ -0,0 +1,81 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const scanRoots = [
'src/app/[locale]/workspace/[projectId]/modes/novel-promotion',
]
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const forbiddenRules = [
{
name: 'localProject/localEpisode mirror state',
test: (line) => /\blocalProject\b|\blocalEpisode\b/.test(line),
},
{
name: 'server mirror useState(projectData.*)',
test: (line) => /useState\s*\(\s*projectData\./.test(line),
},
{
name: 'server mirror useState(episode?.*)',
test: (line) => /useState\s*\(\s*episode\?\./.test(line),
},
]
function fail(title, details = []) {
console.error(`\n[no-server-mirror-state] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
const ext = path.extname(entry.name)
if (sourceExtensions.has(ext)) out.push(fullPath)
}
return out
}
function collectViolations(fullPath) {
const relPath = toRel(fullPath)
const content = fs.readFileSync(fullPath, 'utf8')
const lines = content.split('\n')
const violations = []
for (let i = 0; i < lines.length; i += 1) {
const line = lines[i]
for (const rule of forbiddenRules) {
if (rule.test(line)) {
violations.push(`${relPath}:${i + 1} forbidden: ${rule.name}`)
}
}
}
return violations
}
const allFiles = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = allFiles.flatMap((fullPath) => collectViolations(fullPath))
if (violations.length > 0) {
fail('Found forbidden server mirror state patterns', violations)
}
console.log('[no-server-mirror-state] OK')
+143
View File
@@ -0,0 +1,143 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const catalogPath = path.join(root, 'src', 'lib', 'prompt-i18n', 'catalog.ts')
const singlePlaceholderPattern = /\{([A-Za-z0-9_]+)\}/g
const doublePlaceholderPattern = /\{\{([A-Za-z0-9_]+)\}\}/g
const unresolvedPlaceholderPattern = /\{\{?[A-Za-z0-9_]+\}?\}/g
function fail(title, details = []) {
console.error(`\n[prompt-ab-regression] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
/**
 * Extract { pathStem, variableKeys } entries from catalog.ts source text.
 *
 * Parsing is regex-based: each `pathStem: '...'` is paired with the nearest
 * following `variableKeys: [...]` list, and the single-quoted names inside
 * that list become the entry's variableKeys.
 *
 * @param {string} text - raw catalog.ts contents
 * @returns {{ pathStem: string, variableKeys: string[] }[]}
 */
function parseCatalog(text) {
  const entryPattern = /pathStem:\s*'([^']+)'\s*,[\s\S]*?variableKeys:\s*\[([\s\S]*?)\]\s*,/g
  const parsed = []
  let hit
  while ((hit = entryPattern.exec(text)) !== null) {
    const variableKeys = []
    for (const keyMatch of (hit[2] || '').matchAll(/'([^']+)'/g)) {
      variableKeys.push(keyMatch[1])
    }
    parsed.push({ pathStem: hit[1], variableKeys })
  }
  return parsed
}
/**
 * Collect the distinct placeholder names appearing in a template as either
 * single-brace `{key}` or double-brace `{{key}}`.
 *
 * @param {string} template - prompt template text
 * @returns {string[]} unique keys in first-seen order (single-brace scan
 *   first, then double-brace scan)
 */
function extractPlaceholders(template) {
  const found = new Set()
  for (const hit of template.matchAll(/\{([A-Za-z0-9_]+)\}/g)) {
    if (hit[1]) found.add(hit[1])
  }
  for (const hit of template.matchAll(/\{\{([A-Za-z0-9_]+)\}\}/g)) {
    if (hit[1]) found.add(hit[1])
  }
  return [...found]
}
/**
 * Substitute every `{key}` / `{{key}}` placeholder in `template` with the
 * corresponding value from `variables`.
 *
 * @param {string} template - prompt template text
 * @param {Record<string, string>} variables - placeholder name -> replacement
 * @returns {string} the rendered template
 *
 * Fixes over the naive implementation:
 * - the key is regex-escaped before being interpolated into `new RegExp`,
 *   so keys containing regex metacharacters cannot corrupt the pattern;
 * - the replacement is supplied via a function, so '$' sequences in the
 *   value (e.g. '$&', '$1') are inserted literally instead of being
 *   expanded as String.prototype.replace replacement patterns.
 */
function replaceAll(template, variables) {
  let rendered = template
  for (const [key, value] of Object.entries(variables)) {
    // Escape regex metacharacters in the key (standard escape idiom).
    const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
    // Double-brace form first so '{{key}}' is consumed in one match.
    const pattern = new RegExp(`\\{\\{${escapedKey}\\}\\}|\\{${escapedKey}\\}`, 'g')
    rendered = rendered.replace(pattern, () => value)
  }
  return rendered
}
/**
 * Items of `left` that do not appear in `right`, preserving `left`'s order.
 *
 * @param {string[]} left
 * @param {string[]} right
 * @returns {string[]}
 */
function setDiff(left, right) {
  const exclude = new Set(right)
  const kept = []
  for (const candidate of left) {
    if (!exclude.has(candidate)) kept.push(candidate)
  }
  return kept
}
if (!fs.existsSync(catalogPath)) {
fail('catalog.ts not found', ['src/lib/prompt-i18n/catalog.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const entries = parseCatalog(catalogText)
if (entries.length === 0) {
fail('failed to parse prompt catalog entries')
}
const violations = []
for (const entry of entries) {
const zhPath = path.join(root, 'lib', 'prompts', `${entry.pathStem}.zh.txt`)
const enPath = path.join(root, 'lib', 'prompts', `${entry.pathStem}.en.txt`)
if (!fs.existsSync(zhPath)) {
violations.push(`missing zh template: lib/prompts/${entry.pathStem}.zh.txt`)
continue
}
if (!fs.existsSync(enPath)) {
violations.push(`missing en template: lib/prompts/${entry.pathStem}.en.txt`)
continue
}
const zhTemplate = fs.readFileSync(zhPath, 'utf8')
const enTemplate = fs.readFileSync(enPath, 'utf8')
const declared = entry.variableKeys
const zhPlaceholders = extractPlaceholders(zhTemplate)
const enPlaceholders = extractPlaceholders(enTemplate)
const missingInZh = setDiff(declared, zhPlaceholders)
const missingInEn = setDiff(declared, enPlaceholders)
const extraInZh = setDiff(zhPlaceholders, declared)
const extraInEn = setDiff(enPlaceholders, declared)
const zhOnly = setDiff(zhPlaceholders, enPlaceholders)
const enOnly = setDiff(enPlaceholders, zhPlaceholders)
for (const key of missingInZh) {
violations.push(`missing {${key}} in zh template: lib/prompts/${entry.pathStem}.zh.txt`)
}
for (const key of missingInEn) {
violations.push(`missing {${key}} in en template: lib/prompts/${entry.pathStem}.en.txt`)
}
for (const key of extraInZh) {
violations.push(`unexpected {${key}} in zh template: lib/prompts/${entry.pathStem}.zh.txt`)
}
for (const key of extraInEn) {
violations.push(`unexpected {${key}} in en template: lib/prompts/${entry.pathStem}.en.txt`)
}
for (const key of zhOnly) {
violations.push(`placeholder {${key}} exists only in zh template: ${entry.pathStem}`)
}
for (const key of enOnly) {
violations.push(`placeholder {${key}} exists only in en template: ${entry.pathStem}`)
}
const variables = Object.fromEntries(
declared.map((key) => [key, `__AB_SAMPLE_${key.toUpperCase()}__`]),
)
const renderedZh = replaceAll(zhTemplate, variables)
const renderedEn = replaceAll(enTemplate, variables)
const unresolvedZh = renderedZh.match(unresolvedPlaceholderPattern) || []
const unresolvedEn = renderedEn.match(unresolvedPlaceholderPattern) || []
if (unresolvedZh.length > 0) {
violations.push(`unresolved placeholders in zh template: ${entry.pathStem} -> ${unresolvedZh.join(', ')}`)
}
if (unresolvedEn.length > 0) {
violations.push(`unresolved placeholders in en template: ${entry.pathStem} -> ${unresolvedEn.join(', ')}`)
}
for (const [key, sample] of Object.entries(variables)) {
if (!renderedZh.includes(sample)) {
violations.push(`zh template variable not used after render: ${entry.pathStem}.{${key}}`)
}
if (!renderedEn.includes(sample)) {
violations.push(`en template variable not used after render: ${entry.pathStem}.{${key}}`)
}
}
}
if (violations.length > 0) {
fail('A/B regression check failed', violations)
}
console.log(`[prompt-ab-regression] OK (${entries.length} templates checked)`)
+160
View File
@@ -0,0 +1,160 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src', 'scripts']
const allowedPromptTemplateReaders = new Set([
'src/lib/prompt-i18n/template-store.ts',
'scripts/guards/prompt-i18n-guard.mjs',
'scripts/guards/prompt-semantic-regression.mjs',
'scripts/guards/prompt-ab-regression.mjs',
'scripts/guards/prompt-json-canary-guard.mjs',
])
const languageDirectiveAllowList = new Set([
'scripts/guards/prompt-i18n-guard.mjs',
])
const languageDirectivePattern = /请用中文|中文输出|use Chinese|output in Chinese/i
function fail(title, details = []) {
console.error(`\n[prompt-i18n-guard] ${title}`)
for (const line of details) {
console.error(` - ${line}`)
}
process.exit(1)
}
function toRel(fullPath) {
return path.relative(root, fullPath).split(path.sep).join('/')
}
function walk(dir, out = []) {
if (!fs.existsSync(dir)) return out
const entries = fs.readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
const fullPath = path.join(dir, entry.name)
if (entry.isDirectory()) {
walk(fullPath, out)
continue
}
out.push(fullPath)
}
return out
}
function listSourceFiles() {
return scanRoots
.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
.filter((fullPath) => sourceExtensions.has(path.extname(fullPath)))
}
/**
 * Find source files that read prompt templates from disk directly instead of
 * going through the template store (buildPrompt/getPromptTemplate).
 *
 * A file is flagged when it both calls readFileSync(...) AND mentions the
 * prompt directory — either the literal 'lib/prompts' or the pieces 'lib'
 * and 'prompts' as separate string literals (catching
 * path.join(root, 'lib', 'prompts') style construction).
 *
 * NOTE(review): the split-literal heuristic can false-positive on a file
 * that merely contains both strings for unrelated reasons; such files must
 * be added to allowedPromptTemplateReaders.
 *
 * @returns {string[]} violation messages (one per offending file)
 */
function collectDirectPromptReadViolations() {
  const violations = []
  const files = listSourceFiles()
  for (const filePath of files) {
    const relPath = toRel(filePath)
    // Sanctioned readers (the template store itself and the guard scripts).
    if (allowedPromptTemplateReaders.has(relPath)) continue
    const content = fs.readFileSync(filePath, 'utf8')
    const hasReadFileSync = /\breadFileSync\s*\(/.test(content)
    if (!hasReadFileSync) continue
    const hasPromptPathToken =
      content.includes('lib/prompts')
      || (
        /['"]lib['"]/.test(content)
        && /['"]prompts['"]/.test(content)
      )
    if (hasPromptPathToken) {
      violations.push(`${relPath} direct prompt file read is forbidden; use buildPrompt/getPromptTemplate`)
    }
  }
  return violations
}
function collectLanguageDirectiveViolations() {
const violations = []
for (const filePath of listSourceFiles()) {
const relPath = toRel(filePath)
if (languageDirectiveAllowList.has(relPath)) continue
const lines = fs.readFileSync(filePath, 'utf8').split('\n')
for (let index = 0; index < lines.length; index += 1) {
const line = lines[index]
if (languageDirectivePattern.test(line)) {
violations.push(`${relPath}:${index + 1} hardcoded language directive is forbidden`)
}
}
}
const promptFiles = walk(path.join(root, 'lib', 'prompts'))
.filter((fullPath) => fullPath.endsWith('.en.txt'))
for (const filePath of promptFiles) {
const relPath = toRel(filePath)
const lines = fs.readFileSync(filePath, 'utf8').split('\n')
for (let index = 0; index < lines.length; index += 1) {
const line = lines[index]
if (languageDirectivePattern.test(line)) {
violations.push(`${relPath}:${index + 1} English template cannot require Chinese output`)
}
}
}
return violations
}
function collectLegacyPromptFiles() {
return walk(path.join(root, 'lib', 'prompts'))
.map((fullPath) => toRel(fullPath))
.filter((relPath) => relPath.endsWith('.txt') && !relPath.endsWith('.zh.txt') && !relPath.endsWith('.en.txt'))
}
/**
 * Ensure every prompt pathStem declared in the catalog has both a Chinese
 * (.zh.txt) and an English (.en.txt) template file under lib/prompts.
 *
 * Exits the process via fail(...) when the catalog is missing, declares no
 * stems, or any locale template is absent; returns normally on success.
 */
function verifyPromptCatalogCoverage() {
  const catalogPath = path.join(root, 'src', 'lib', 'prompt-i18n', 'catalog.ts')
  if (!fs.existsSync(catalogPath)) {
    fail('Missing prompt catalog file', ['src/lib/prompt-i18n/catalog.ts'])
  }
  const catalogText = fs.readFileSync(catalogPath, 'utf8')
  // Light-weight parse: pull every pathStem literal out of catalog.ts.
  const stems = Array.from(catalogText.matchAll(/pathStem:\s*'([^']+)'/g)).map((match) => match[1])
  if (stems.length === 0) {
    // An empty catalog would make the guard vacuously pass, so treat it as failure.
    fail('No prompt pathStem found in catalog.ts')
  }
  const missing = []
  for (const stem of stems) {
    const zhPath = path.join(root, 'lib', 'prompts', `${stem}.zh.txt`)
    const enPath = path.join(root, 'lib', 'prompts', `${stem}.en.txt`)
    if (!fs.existsSync(zhPath)) {
      missing.push(`missing zh template: lib/prompts/${stem}.zh.txt`)
    }
    if (!fs.existsSync(enPath)) {
      missing.push(`missing en template: lib/prompts/${stem}.en.txt`)
    }
  }
  if (missing.length > 0) {
    fail('Prompt template coverage check failed', missing)
  }
}
// --- guard entry point -------------------------------------------------------
// Checks run in order; fail() exits the process on the first failing category.

// 1) No legacy templates: every prompt file must carry a locale suffix.
const legacyPromptFiles = collectLegacyPromptFiles()
if (legacyPromptFiles.length > 0) {
  fail('Legacy prompt files found (.txt without locale suffix)', legacyPromptFiles)
}
// 2) Every catalog pathStem must have both zh and en templates on disk.
verifyPromptCatalogCoverage()
// 3) Templates must not be read directly (see collectDirectPromptReadViolations).
const promptReadViolations = collectDirectPromptReadViolations()
if (promptReadViolations.length > 0) {
  fail('Found direct prompt template reads', promptReadViolations)
}
// 4) No hardcoded language directives in sources or templates.
const languageViolations = collectLanguageDirectiveViolations()
if (languageViolations.length > 0) {
  fail('Found hardcoded language directives', languageViolations)
}
console.log('[prompt-i18n-guard] OK')
+250
View File
@@ -0,0 +1,250 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
// Guard is expected to run from the repository root.
const root = process.cwd()
// Canary fixtures: checked-in JSON samples whose shape is pinned by the
// validate*Canary functions below.
const CANARY_FILES = {
  clips: 'standards/prompt-canary/story_to_script_clips.canary.json',
  screenplay: 'standards/prompt-canary/screenplay_conversion.canary.json',
  storyboardPanels: 'standards/prompt-canary/storyboard_panels.canary.json',
  voiceAnalysis: 'standards/prompt-canary/voice_analysis.canary.json',
}
// Per-template token requirements: each prompt template (both zh and en
// locales) must literally contain every listed token; enforced by
// checkTemplateTokens below.
const TEMPLATE_TOKEN_REQUIREMENTS = {
  'novel-promotion/agent_clip': ['start', 'end', 'summary', 'location', 'characters'],
  'novel-promotion/screenplay_conversion': [
    'clip_id',
    'original_text',
    'scenes',
    'heading',
    'content',
    'type',
    'action',
    'dialogue',
    'voiceover',
  ],
  'novel-promotion/agent_storyboard_plan': [
    'panel_number',
    'description',
    'characters',
    'location',
    'scene_type',
    'source_text',
  ],
  'novel-promotion/agent_storyboard_detail': [
    'panel_number',
    'description',
    'characters',
    'location',
    'scene_type',
    'source_text',
    'shot_type',
    'camera_move',
    'video_prompt',
  ],
  'novel-promotion/agent_storyboard_insert': [
    'panel_number',
    'description',
    'characters',
    'location',
    'scene_type',
    'source_text',
    'shot_type',
    'camera_move',
    'video_prompt',
  ],
  'novel-promotion/voice_analysis': [
    'lineIndex',
    'speaker',
    'content',
    'emotionStrength',
    'matchedPanel',
    'storyboardId',
    'panelIndex',
  ],
}
// Print a titled failure report to stderr and terminate with a non-zero exit
// code so CI treats the guard as failed.
function fail(title, details = []) {
  console.error(`\n[prompt-json-canary-guard] ${title}`)
  details.forEach((line) => console.error(`  - ${line}`))
  process.exit(1)
}
// True when `value` is a plain object: non-null, typeof object, not an array.
function isRecord(value) {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && Array.isArray(value) === false
}
// Type guard: true only for primitive strings.
function isString(value) {
  return typeof value === 'string'
}
// Type guard: true for finite primitive numbers (rejects NaN and Infinity).
function isNumber(value) {
  return typeof value === 'number' && Number.isFinite(value)
}
// Read and parse a JSON fixture at a repo-relative path. A missing file or a
// parse error aborts the guard via fail() (which exits the process), so
// callers can assume a parsed value on return.
function readJson(relativePath) {
  const fullPath = path.join(root, relativePath)
  if (!fs.existsSync(fullPath)) {
    fail('Missing canary fixture', [relativePath])
  }
  try {
    return JSON.parse(fs.readFileSync(fullPath, 'utf8'))
  } catch (error) {
    fail('Invalid canary fixture JSON', [`${relativePath}: ${error instanceof Error ? error.message : String(error)}`])
  }
}
// Validate the clips canary fixture: a non-empty array of rows, each with
// start/end anchors (>= 5 chars), a non-empty summary, a nullable location
// string and a characters string array. Returns the first problem found as a
// message string, or null when the fixture is valid.
function validateClipCanary(value) {
  if (!Array.isArray(value) || value.length === 0) return 'clips fixture must be a non-empty array'
  let index = 0
  for (const row of value) {
    if (!isRecord(row)) return `clips[${index}] must be an object`
    if (!isString(row.start) || row.start.length < 5) return `clips[${index}].start must be string length >= 5`
    if (!isString(row.end) || row.end.length < 5) return `clips[${index}].end must be string length >= 5`
    if (!isString(row.summary) || row.summary.length === 0) return `clips[${index}].summary must be non-empty string`
    if (row.location !== null && !isString(row.location)) return `clips[${index}].location must be string or null`
    const charactersOk = Array.isArray(row.characters) && row.characters.every((item) => isString(item))
    if (!charactersOk) return `clips[${index}].characters must be string array`
    index += 1
  }
  return null
}
// Validate the screenplay canary fixture.
// Expected shape: { clip_id, original_text, scenes[] } where each scene has a
// numeric scene_number, a heading { int_ext, location, time }, a description,
// a characters string array and a non-empty content[] of typed segments
// (action | dialogue | voiceover). Returns the first problem found as a
// message string, or null when the fixture is valid.
function validateScreenplayCanary(value) {
  if (!isRecord(value)) return 'screenplay fixture must be an object'
  if (!isString(value.clip_id) || !value.clip_id) return 'screenplay.clip_id must be non-empty string'
  if (!isString(value.original_text)) return 'screenplay.original_text must be string'
  if (!Array.isArray(value.scenes) || value.scenes.length === 0) return 'screenplay.scenes must be non-empty array'
  for (let i = 0; i < value.scenes.length; i += 1) {
    const scene = value.scenes[i]
    if (!isRecord(scene)) return `screenplay.scenes[${i}] must be object`
    if (!isNumber(scene.scene_number)) return `screenplay.scenes[${i}].scene_number must be number`
    if (!isRecord(scene.heading)) return `screenplay.scenes[${i}].heading must be object`
    if (!isString(scene.heading.int_ext)) return `screenplay.scenes[${i}].heading.int_ext must be string`
    if (!isString(scene.heading.location)) return `screenplay.scenes[${i}].heading.location must be string`
    if (!isString(scene.heading.time)) return `screenplay.scenes[${i}].heading.time must be string`
    if (!isString(scene.description)) return `screenplay.scenes[${i}].description must be string`
    if (!Array.isArray(scene.characters) || !scene.characters.every((item) => isString(item))) {
      return `screenplay.scenes[${i}].characters must be string array`
    }
    if (!Array.isArray(scene.content) || scene.content.length === 0) return `screenplay.scenes[${i}].content must be non-empty array`
    for (let j = 0; j < scene.content.length; j += 1) {
      const segment = scene.content[j]
      if (!isRecord(segment)) return `screenplay.scenes[${i}].content[${j}] must be object`
      if (!isString(segment.type)) return `screenplay.scenes[${i}].content[${j}].type must be string`
      if (segment.type === 'action') {
        if (!isString(segment.text)) return `screenplay action[${i}:${j}].text must be string`
      } else if (segment.type === 'dialogue') {
        if (!isString(segment.character)) return `screenplay dialogue[${i}:${j}].character must be string`
        if (!isString(segment.lines)) return `screenplay dialogue[${i}:${j}].lines must be string`
        // parenthetical is optional, but must be a string when present.
        if (segment.parenthetical !== undefined && !isString(segment.parenthetical)) {
          return `screenplay dialogue[${i}:${j}].parenthetical must be string when present`
        }
      } else if (segment.type === 'voiceover') {
        if (!isString(segment.text)) return `screenplay voiceover[${i}:${j}].text must be string`
        // voiceover speaker is optional.
        if (segment.character !== undefined && !isString(segment.character)) {
          return `screenplay voiceover[${i}:${j}].character must be string when present`
        }
      } else {
        return `screenplay.scenes[${i}].content[${j}].type must be action/dialogue/voiceover`
      }
    }
  }
  return null
}
// Validate the storyboard panels canary fixture: a non-empty array of panel
// objects with required string/number fields, an optional numeric duration,
// and a characters[] of { name, appearance? } objects. Returns the first
// problem found as a message string, or null when the fixture is valid.
function validateStoryboardPanelsCanary(value) {
  if (!Array.isArray(value) || value.length === 0) return 'storyboard panels fixture must be non-empty array'
  for (let i = 0; i < value.length; i += 1) {
    const panel = value[i]
    if (!isRecord(panel)) return `storyboardPanels[${i}] must be object`
    if (!isNumber(panel.panel_number)) return `storyboardPanels[${i}].panel_number must be number`
    if (!isString(panel.description)) return `storyboardPanels[${i}].description must be string`
    if (!isString(panel.location)) return `storyboardPanels[${i}].location must be string`
    if (!isString(panel.scene_type)) return `storyboardPanels[${i}].scene_type must be string`
    if (!isString(panel.source_text)) return `storyboardPanels[${i}].source_text must be string`
    if (!isString(panel.shot_type)) return `storyboardPanels[${i}].shot_type must be string`
    if (!isString(panel.camera_move)) return `storyboardPanels[${i}].camera_move must be string`
    if (!isString(panel.video_prompt)) return `storyboardPanels[${i}].video_prompt must be string`
    // duration is optional, but must be a finite number when present.
    if (panel.duration !== undefined && !isNumber(panel.duration)) return `storyboardPanels[${i}].duration must be number when present`
    if (!Array.isArray(panel.characters)) return `storyboardPanels[${i}].characters must be array`
    for (let j = 0; j < panel.characters.length; j += 1) {
      const character = panel.characters[j]
      if (!isRecord(character)) return `storyboardPanels[${i}].characters[${j}] must be object`
      if (!isString(character.name)) return `storyboardPanels[${i}].characters[${j}].name must be string`
      // appearance is optional, but must be a string when present.
      if (character.appearance !== undefined && !isString(character.appearance)) {
        return `storyboardPanels[${i}].characters[${j}].appearance must be string when present`
      }
    }
  }
  return null
}
// Validate the voice analysis canary fixture: a non-empty array of rows with
// numeric lineIndex/emotionStrength, string speaker/content, and a nullable
// matchedPanel of shape { storyboardId, panelIndex }. Returns the first
// problem found as a message string, or null when the fixture is valid.
function validateVoiceAnalysisCanary(value) {
  if (!Array.isArray(value) || value.length === 0) return 'voice analysis fixture must be non-empty array'
  for (const [i, row] of value.entries()) {
    if (!isRecord(row)) return `voiceAnalysis[${i}] must be object`
    if (!isNumber(row.lineIndex)) return `voiceAnalysis[${i}].lineIndex must be number`
    if (!isString(row.speaker)) return `voiceAnalysis[${i}].speaker must be string`
    if (!isString(row.content)) return `voiceAnalysis[${i}].content must be string`
    if (!isNumber(row.emotionStrength)) return `voiceAnalysis[${i}].emotionStrength must be number`
    if (row.matchedPanel === null) continue
    if (!isRecord(row.matchedPanel)) return `voiceAnalysis[${i}].matchedPanel must be object or null`
    if (!isString(row.matchedPanel.storyboardId)) return `voiceAnalysis[${i}].matchedPanel.storyboardId must be string`
    if (!isNumber(row.matchedPanel.panelIndex)) return `voiceAnalysis[${i}].matchedPanel.panelIndex must be number`
  }
  return null
}
// For both locales (zh then en), confirm the template file exists and
// contains every required token; returns human-readable violation messages
// (empty array when everything is present).
function checkTemplateTokens(pathStem, requiredTokens) {
  const violations = []
  for (const locale of ['zh', 'en']) {
    const relPath = `lib/prompts/${pathStem}.${locale}.txt`
    const fullPath = path.join(root, relPath)
    if (!fs.existsSync(fullPath)) {
      violations.push(`missing template: ${relPath}`)
      continue
    }
    const content = fs.readFileSync(fullPath, 'utf8')
    const absent = requiredTokens.filter((token) => !content.includes(token))
    violations.push(...absent.map((token) => `missing token ${token} in ${relPath}`))
  }
  return violations
}
// --- guard entry point -------------------------------------------------------
// Validate every canary fixture, then template token coverage; all violations
// are collected before failing so a single run reports everything at once.
const violations = []
const clipsErr = validateClipCanary(readJson(CANARY_FILES.clips))
if (clipsErr) violations.push(clipsErr)
const screenplayErr = validateScreenplayCanary(readJson(CANARY_FILES.screenplay))
if (screenplayErr) violations.push(screenplayErr)
const panelsErr = validateStoryboardPanelsCanary(readJson(CANARY_FILES.storyboardPanels))
if (panelsErr) violations.push(panelsErr)
const voiceErr = validateVoiceAnalysisCanary(readJson(CANARY_FILES.voiceAnalysis))
if (voiceErr) violations.push(voiceErr)
for (const [pathStem, requiredTokens] of Object.entries(TEMPLATE_TOKEN_REQUIREMENTS)) {
  violations.push(...checkTemplateTokens(pathStem, requiredTokens))
}
if (violations.length > 0) {
  fail('JSON schema canary check failed', violations)
}
console.log('[prompt-json-canary-guard] OK')
@@ -0,0 +1,108 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
// Guard is expected to run from the repository root.
const root = process.cwd()
// Prompt catalog declaring every template's pathStem and variableKeys.
const catalogPath = path.join(root, 'src', 'lib', 'prompt-i18n', 'catalog.ts')
// Any Han-script character; English templates must contain none.
const chineseCharPattern = /[\p{Script=Han}]/u
// Placeholder spellings accepted in templates: {key} and {{key}}.
const singlePlaceholderPattern = /\{([A-Za-z0-9_]+)\}/g
const doublePlaceholderPattern = /\{\{([A-Za-z0-9_]+)\}\}/g
// Quoted JSON key tokens that must literally survive in specific English
// templates so the model keeps emitting the schema consumers parse.
const criticalTemplateTokens = new Map([
  ['novel-promotion/voice_analysis', ['"lineIndex"', '"speaker"', '"content"', '"emotionStrength"', '"matchedPanel"']],
  ['novel-promotion/agent_storyboard_plan', ['"panel_number"', '"description"', '"characters"', '"location"', '"scene_type"', '"source_text"', '"shot_type"', '"camera_move"', '"video_prompt"']],
  ['novel-promotion/agent_storyboard_detail', ['"panel_number"', '"description"', '"characters"', '"location"', '"scene_type"', '"source_text"', '"shot_type"', '"camera_move"', '"video_prompt"']],
  ['novel-promotion/agent_storyboard_insert', ['"panel_number"', '"description"', '"characters"', '"location"', '"scene_type"', '"source_text"', '"shot_type"', '"camera_move"', '"video_prompt"']],
  ['novel-promotion/screenplay_conversion', ['"clip_id"', '"scenes"', '"heading"', '"content"', '"dialogue"', '"voiceover"']],
  ['novel-promotion/select_location', ['"locations"', '"name"', '"summary"', '"descriptions"']],
  ['novel-promotion/episode_split', ['"analysis"', '"episodes"', '"startMarker"', '"endMarker"', '"validation"']],
  ['novel-promotion/image_prompt_modify', ['"image_prompt"', '"video_prompt"']],
  ['novel-promotion/character_create', ['"prompt"']],
  ['novel-promotion/location_create', ['"prompt"']],
])
// Print a titled failure report to stderr and exit non-zero so CI fails.
function fail(title, details = []) {
  console.error(`\n[prompt-semantic-regression] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Extract { pathStem, variableKeys } entries from the catalog source text.
// Parsing is regex-based: each entry is identified by its `pathStem: '…'`
// literal followed (non-greedily) by its `variableKeys: […],` array.
function parseCatalog(text) {
  const entryPattern = /pathStem:\s*'([^']+)'\s*,[\s\S]*?variableKeys:\s*\[([\s\S]*?)\]\s*,/g
  return Array.from(text.matchAll(entryPattern), (match) => {
    const keyList = match[2] || ''
    const variableKeys = Array.from(keyList.matchAll(/'([^']+)'/g), (item) => item[1])
    return { pathStem: match[1], variableKeys }
  })
}
// Collect the distinct placeholder names appearing in a template, accepting
// both `{key}` and `{{key}}` spellings (single-brace matches are scanned
// first, preserving the original discovery order).
function extractPlaceholders(template) {
  const found = new Set()
  const spellings = [/\{([A-Za-z0-9_]+)\}/g, /\{\{([A-Za-z0-9_]+)\}\}/g]
  for (const pattern of spellings) {
    for (const match of template.matchAll(pattern)) {
      if (match[1]) found.add(match[1])
    }
  }
  return Array.from(found)
}
// --- guard entry point -------------------------------------------------------
if (!fs.existsSync(catalogPath)) {
  fail('catalog.ts not found', ['src/lib/prompt-i18n/catalog.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const entries = parseCatalog(catalogText)
if (entries.length === 0) {
  fail('failed to parse prompt catalog entries')
}
const violations = []
for (const entry of entries) {
  // Only English templates are checked here: they must exist, contain no
  // Chinese characters, and agree with the catalog's variableKeys.
  const templatePath = path.join(root, 'lib', 'prompts', `${entry.pathStem}.en.txt`)
  if (!fs.existsSync(templatePath)) {
    violations.push(`missing template: lib/prompts/${entry.pathStem}.en.txt`)
    continue
  }
  const template = fs.readFileSync(templatePath, 'utf8')
  if (chineseCharPattern.test(template)) {
    violations.push(`unexpected Chinese content in English template: lib/prompts/${entry.pathStem}.en.txt`)
  }
  // Placeholders and catalog variableKeys must match in both directions:
  // no missing and no unexpected placeholders.
  const placeholders = extractPlaceholders(template)
  const placeholderSet = new Set(placeholders)
  const variableKeySet = new Set(entry.variableKeys)
  for (const key of entry.variableKeys) {
    if (!placeholderSet.has(key)) {
      violations.push(`missing placeholder {${key}} in lib/prompts/${entry.pathStem}.en.txt`)
    }
  }
  for (const key of placeholders) {
    if (!variableKeySet.has(key)) {
      violations.push(`unexpected placeholder {${key}} in lib/prompts/${entry.pathStem}.en.txt`)
    }
  }
  // Critical quoted JSON key tokens must survive any template edits.
  const requiredTokens = criticalTemplateTokens.get(entry.pathStem) || []
  for (const token of requiredTokens) {
    if (!template.includes(token)) {
      violations.push(`missing semantic token ${token} in lib/prompts/${entry.pathStem}.en.txt`)
    }
  }
}
if (violations.length > 0) {
  fail('semantic regression check failed', violations)
}
console.log(`[prompt-semantic-regression] OK (${entries.length} templates checked)`)
@@ -0,0 +1,9 @@
{
"allowedDirectTaskStateUsageFiles": [
"src/lib/query/hooks/useTaskTargetStates.ts",
"src/lib/query/hooks/useTaskPresentation.ts",
"src/lib/query/hooks/useProjectAssets.ts",
"src/lib/query/hooks/useGlobalAssets.ts"
],
"allowedLegacyGeneratingUsageFiles": []
}
+132
View File
@@ -0,0 +1,132 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
// Guard is expected to run from the repository root.
const workspaceRoot = process.cwd()
// Baseline allowlist consumed by the checks below.
const baselinePath = path.join(workspaceRoot, 'scripts/guards/task-loading-baseline.json')
// Recursively gather every file under `dir`, skipping dependency and build
// directories that never contain first-party source. Results accumulate into
// (and are returned via) `out`.
function walkFiles(dir, out = []) {
  const skipped = new Set(['node_modules', '.git', '.next'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (skipped.has(entry.name)) continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walkFiles(fullPath, out)
    } else {
      out.push(fullPath)
    }
  }
  return out
}
// Convert an absolute path to a workspace-relative POSIX path for reporting.
function toPosixRelative(filePath) {
  return path.relative(workspaceRoot, filePath).split(path.sep).join('/')
}
// Scan TypeScript sources for a literal substring and report every hit as a
// `relative/path:line` string (1-based line numbers).
function collectMatches(files, pattern) {
  const matches = []
  for (const fullPath of files) {
    const isTypeScript = fullPath.endsWith('.ts') || fullPath.endsWith('.tsx')
    if (!isTypeScript) continue
    const relPath = toPosixRelative(fullPath)
    const lines = fs.readFileSync(fullPath, 'utf8').split('\n')
    lines.forEach((line, index) => {
      if (line.includes(pattern)) {
        matches.push(`${relPath}:${index + 1}`)
      }
    })
  }
  return matches
}
// Print a titled failure report to stderr and exit non-zero so CI fails.
// NOTE: unlike sibling guards, `lines` here has no default and is required.
function fail(title, lines) {
  console.error(`\n[task-loading-guard] ${title}`)
  for (const line of lines) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// --- guard entry point -------------------------------------------------------
if (!fs.existsSync(baselinePath)) {
  fail('Missing baseline file', [toPosixRelative(baselinePath)])
}
// Baseline allowlists: files that may still use the patterns checked below.
const baseline = JSON.parse(fs.readFileSync(baselinePath, 'utf8'))
const allowedFiles = new Set(baseline.allowedDirectTaskStateUsageFiles || [])
const allowedLegacyGeneratingFiles = new Set(baseline.allowedLegacyGeneratingUsageFiles || [])
const allFiles = walkFiles(path.join(workspaceRoot, 'src'))
// 1) Direct useTaskTargetStates( calls are only allowed in baseline files.
const directTaskStateUsage = collectMatches(allFiles, 'useTaskTargetStates(')
const directUsageOutOfAllowlist = directTaskStateUsage
  .map((entry) => entry.split(':')[0])
  .filter((file) => !allowedFiles.has(file))
if (directUsageOutOfAllowlist.length > 0) {
  fail(
    'Found component-level direct useTaskTargetStates outside baseline allowlist',
    Array.from(new Set(directUsageOutOfAllowlist)),
  )
}
// 2) The video.panelCard.generating label must not be reused anywhere.
const crossDomainLabels = collectMatches(allFiles, 'video.panelCard.generating')
if (crossDomainLabels.length > 0) {
  fail('Found cross-domain loading label reuse (video.panelCard.generating)', crossDomainLabels)
}
// 3) Legacy "generating" patterns are banned from UI code (src/app and
// src/components), except for baseline-allowlisted files.
const uiFiles = allFiles.filter((file) => {
  const relPath = toPosixRelative(file)
  return relPath.startsWith('src/app/') || relPath.startsWith('src/components/')
})
const legacyGeneratingPatterns = [
  'appearance.generating',
  'panel.generatingImage',
  'shot.generatingImage',
  'line.generating',
]
const legacyGeneratingMatches = legacyGeneratingPatterns.flatMap((pattern) =>
  collectMatches(uiFiles, pattern),
)
const legacyGeneratingOutOfAllowlist = legacyGeneratingMatches
  .map((entry) => entry.split(':')[0])
  .filter((file) => !allowedLegacyGeneratingFiles.has(file))
if (legacyGeneratingOutOfAllowlist.length > 0) {
  fail(
    'Found legacy generating truth usage in UI components',
    Array.from(new Set(legacyGeneratingOutOfAllowlist)),
  )
}
// 4) hooks/index.ts must not re-export these mutations from non-canonical
// modules (useGlobalAssets / useProjectAssets).
const hooksIndexPath = path.join(workspaceRoot, 'src/lib/query/hooks/index.ts')
if (fs.existsSync(hooksIndexPath)) {
  const hooksIndex = fs.readFileSync(hooksIndexPath, 'utf8')
  const bannedReexports = [
    {
      pattern: /export\s*\{[^}]*useGenerateCharacterImage[^}]*\}\s*from\s*['"]\.\/useGlobalAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateCharacterImage from useGlobalAssets',
    },
    {
      pattern: /export\s*\{[^}]*useGenerateLocationImage[^}]*\}\s*from\s*['"]\.\/useGlobalAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateLocationImage from useGlobalAssets',
    },
    {
      pattern: /export\s*\{[^}]*useGenerateProjectCharacterImage[^}]*\}\s*from\s*['"]\.\/useProjectAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateProjectCharacterImage from useProjectAssets',
    },
    {
      pattern: /export\s*\{[^}]*useGenerateProjectLocationImage[^}]*\}\s*from\s*['"]\.\/useProjectAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateProjectLocationImage from useProjectAssets',
    },
  ]
  const violations = bannedReexports
    .filter((item) => item.pattern.test(hooksIndex))
    .map((item) => item.message)
  if (violations.length > 0) {
    fail('Found non-canonical mutation re-exports', violations)
  }
}
console.log('[task-loading-guard] OK')
@@ -0,0 +1,42 @@
#!/usr/bin/env bash
set -euo pipefail
# Overall exit status; flipped to 1 when any check finds matches.
failed=0
# check_absent LABEL PATTERN PATH...
# Greps tracked and untracked files for PATTERN under the given paths; the
# pattern must be absent, so any match marks the run failed.
check_absent() {
  local label="$1"
  local pattern="$2"
  shift 2
  local output
  # `|| true` keeps `set -e` from aborting when grep finds nothing (exit 1).
  output="$(git grep --untracked -nE "$pattern" -- "$@" || true)"
  if [[ -n "$output" ]]; then
    echo "$output"
    # GitHub Actions error annotation so the violation surfaces in the UI.
    echo "::error title=${label}::${label}"
    failed=1
  fi
}
# UI layers must not branch on the literal 'cancelled' status (== or ===).
check_absent \
  "Do not branch UI status on cancelled" \
  "status[[:space:]]*===[[:space:]]*['\\\"]cancelled['\\\"]|status[[:space:]]*==[[:space:]]*['\\\"]cancelled['\\\"]" \
  src/app \
  src/components \
  src/features \
  src/lib/query
# useTaskHandoff must not appear anywhere under src.
check_absent \
  "useTaskHandoff is forbidden" \
  "useTaskHandoff" \
  src
# Legacy task hooks are banned from the app layer.
check_absent \
  "Do not use legacy task hooks in app layer" \
  "useActiveTasks\\(|useTaskStatus\\(" \
  src/app \
  src/features
if [[ "$failed" -ne 0 ]]; then
  exit 1
fi
echo "task-state-unification guard passed"
+100
View File
@@ -0,0 +1,100 @@
#!/usr/bin/env bash
set -euo pipefail
# Run from the repository root so all git-grep paths are stable.
ROOT_DIR="$(git rev-parse --show-toplevel)"
cd "$ROOT_DIR"
# Overall audit status; set to 1 when any check fails.
FAILED=0
# Print a banner separating audit sections.
print_header() {
  echo
  echo "============================================================"
  echo "$1"
  echo "============================================================"
}
print_ok() {
  echo "[PASS] $1"
}
print_fail() {
  echo "[FAIL] $1"
}
# run_zero_match_check TITLE PATTERN PATH...
# Passes only when `git grep -E PATTERN` finds no match under the paths;
# otherwise prints the hits and marks the audit failed.
run_zero_match_check() {
  local title="$1"
  local pattern="$2"
  shift 2
  local paths=("$@")
  local output
  # `|| true` keeps `set -e` from aborting when grep finds nothing (exit 1).
  output="$(git grep -n -E "$pattern" -- "${paths[@]}" || true)"
  if [[ -z "$output" ]]; then
    print_ok "$title"
  else
    print_fail "$title"
    echo "$output"
    FAILED=1
  fi
}
# Allowlist check: useTaskTargetStates may only be referenced from the two
# canonical hook files; any other hit fails the audit.
run_usetasktargetstates_check() {
  local title="useTaskTargetStates 仅允许在 useProjectAssets/useGlobalAssets 中使用"
  local output
  output="$(git grep -n "useTaskTargetStates" -- src || true)"
  if [[ -z "$output" ]]; then
    print_ok "$title (当前 0 命中)"
    return
  fi
  # Drop hits from the two allowlisted files; anything left is a violation.
  local filtered
  filtered="$(echo "$output" | grep -v "src/lib/query/hooks/useProjectAssets.ts" | grep -v "src/lib/query/hooks/useGlobalAssets.ts" || true)"
  if [[ -z "$filtered" ]]; then
    print_ok "$title"
  else
    print_fail "$title"
    echo "$filtered"
    FAILED=1
  fi
}
print_header "Task Status Cutover Audit"
# Each zero-match check fails the audit when git grep finds any hit in src.
run_zero_match_check \
  "禁止 useTaskHandoff" \
  "useTaskHandoff" \
  src
run_zero_match_check \
  "禁止 manualRegeneratingItems/setRegeneratingItems/clearRegeneratingItem" \
  "manualRegeneratingItems|setRegeneratingItems|clearRegeneratingItem" \
  src
run_zero_match_check \
  "禁止业务层直接判断 status ===/!== cancelled" \
  "status\\s*===\\s*['\\\"]cancelled['\\\"]|status\\s*!==\\s*['\\\"]cancelled['\\\"]" \
  src
run_zero_match_check \
  "禁止 generatingImage/generatingVideo/generatingLipSync 字段" \
  "\\bgeneratingImage\\b|\\bgeneratingVideo\\b|\\bgeneratingLipSync\\b" \
  src
# Allowlist-based check (defined above) rather than a zero-match check.
run_usetasktargetstates_check
run_zero_match_check \
  "禁止 novel-promotion/asset-hub/shared-assets 中 useState(false) 作为生成态命名" \
  "const \\[[^\\]]*(Generating|Regenerating|WaitingForGeneration|AnalyzingAssets|GeneratingAll|CopyingFromGlobal)[^\\]]*\\]\\s*=\\s*useState\\(false\\)" \
  "src/app/[locale]/workspace/[projectId]/modes/novel-promotion" \
  "src/app/[locale]/workspace/asset-hub" \
  "src/components/shared/assets"
print_header "Audit Result"
if [[ "$FAILED" -eq 0 ]]; then
  echo "All checks passed."
  exit 0
fi
echo "Audit failed. Please fix findings above."
exit 1
@@ -0,0 +1,84 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
import { pathToFileURL } from 'url'
// Guard is expected to run from the repository root.
const root = process.cwd()
const apiDir = path.join(root, 'src', 'app', 'api')
// Heuristics: a route "creates data" when it calls `.create(`, "submits a
// task" via `submitTask(`, and counts as compensated when the file mentions
// rollback/compensation anywhere (case-insensitive).
const CREATE_PATTERN = /\.\s*create\s*\(/
const SUBMIT_TASK_PATTERN = /\bsubmitTask\s*\(/
const ROLLBACK_PATTERN = /rollback|compensat/i
// Emit a titled failure report on stderr and abort the process so the guard
// registers as failed in CI.
function fail(title, details = []) {
  const report = [`\n[task-submit-compensation-guard] ${title}`, ...details.map((detail) => `  - ${detail}`)]
  for (const line of report) {
    process.stderr.write(`${line}\n`)
  }
  process.exit(1)
}
// Depth-first collection of every `route.ts` file below `dir`, ignoring VCS
// metadata, Next.js build output and installed dependencies. A missing
// directory yields an empty result.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const ignored = entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules'
    if (ignored) continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
    } else if (entry.name === 'route.ts') {
      out.push(fullPath)
    }
  }
  return out
}
// Convert an absolute path to a repo-relative POSIX path for reporting.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Heuristic check for one route file: a route that both creates data and
// submits an async task must mention a rollback/compensation marker somewhere
// in the file; otherwise a single violation message is returned.
export function inspectTaskSubmitCompensation(relPath, content) {
  const createsData = CREATE_PATTERN.test(content)
  const submitsTask = SUBMIT_TASK_PATTERN.test(content)
  const hasCompensation = ROLLBACK_PATTERN.test(content)
  if (!createsData || !submitsTask || hasCompensation) return []
  return [
    `${relPath} creates data before submitTask without explicit rollback/compensation marker`,
  ]
}
// Scan every API route file under `scanRoot` and flatten the per-file
// violations into a single list (empty when all routes pass).
export function findTaskSubmitCompensationViolations(scanRoot = root) {
  const routesRoot = path.join(scanRoot, 'src', 'app', 'api')
  const violations = []
  for (const fullPath of walk(routesRoot)) {
    const relPath = path.relative(scanRoot, fullPath).split(path.sep).join('/')
    const content = fs.readFileSync(fullPath, 'utf8')
    violations.push(...inspectTaskSubmitCompensation(relPath, content))
  }
  return violations
}
// CLI entry: scan all API route files and fail when any create+submitTask
// route lacks a compensation marker.
export function main() {
  if (!fs.existsSync(apiDir)) {
    fail('Missing src/app/api directory')
  }
  const routeFiles = walk(apiDir)
  const violations = routeFiles
    .map((fullPath) => {
      const relPath = toRel(fullPath)
      const content = fs.readFileSync(fullPath, 'utf8')
      return inspectTaskSubmitCompensation(relPath, content)
    })
    .flat()
  if (violations.length > 0) {
    fail('Found create+submitTask routes without compensation marker', violations)
  }
  process.stdout.write(`[task-submit-compensation-guard] OK routes=${routeFiles.length}\n`)
}
// Run main() only when this module is the entry script (direct execution),
// not when it is imported for its exported helpers.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main()
}
@@ -0,0 +1,96 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
// Guard is expected to run from the repository root.
const root = process.cwd()
// Print a titled failure report to stderr and exit non-zero so CI fails.
function fail(title, details = []) {
  console.error(`\n[task-target-states-no-polling-guard] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Read a repo-relative file as UTF-8; a missing file aborts the guard via
// fail() (which exits the process).
function readFile(relativePath) {
  const fullPath = path.join(root, relativePath)
  if (!fs.existsSync(fullPath)) {
    fail('Missing required file', [relativePath])
  }
  return fs.readFileSync(fullPath, 'utf8')
}
// Recursively list every file under `dir`, skipping VCS metadata, Next.js
// build output and installed dependencies. Results accumulate into `out`.
function walk(dir, out = []) {
  const ignored = new Set(['.git', '.next', 'node_modules'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (ignored.has(entry.name)) continue
    const full = path.join(dir, entry.name)
    if (!entry.isDirectory()) {
      out.push(full)
    } else {
      walk(full, out)
    }
  }
  return out
}
// Convert an absolute path to a repo-relative POSIX path for reporting.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Find every line in src/**/*.ts(x) matching `pattern`; returns hits as
// `relative/path:line` strings (1-based line numbers).
function collectPattern(pattern) {
  const hits = []
  for (const fullPath of walk(path.join(root, 'src'))) {
    const isTypeScript = fullPath.endsWith('.ts') || fullPath.endsWith('.tsx')
    if (!isTypeScript) continue
    const lines = fs.readFileSync(fullPath, 'utf8').split('\n')
    lines.forEach((line, index) => {
      if (pattern.test(line)) {
        hits.push(`${toRel(fullPath)}:${index + 1}`)
      }
    })
  }
  return hits
}
// 1) refetchIntervalMs must not appear anywhere in src.
const refetchIntervalMsHits = collectPattern(/\brefetchIntervalMs\b/)
if (refetchIntervalMsHits.length > 0) {
  fail('Found forbidden refetchIntervalMs usage', refetchIntervalMsHits)
}
// 2) VoiceStage must not poll with setInterval.
const voiceStagePath =
  'src/app/[locale]/workspace/[projectId]/modes/novel-promotion/components/VoiceStage.tsx'
const voiceStageText = readFile(voiceStagePath)
if (voiceStageText.includes('setInterval(')) {
  fail('VoiceStage must not use timer polling', [voiceStagePath])
}
// 3) useTaskTargetStateMap must keep `refetchInterval: false` in its source.
const targetStateMapPath = 'src/lib/query/hooks/useTaskTargetStateMap.ts'
const targetStateMapText = readFile(targetStateMapPath)
if (!/refetchInterval:\s*false/.test(targetStateMapText)) {
  fail('useTaskTargetStateMap must keep refetchInterval disabled', [targetStateMapPath])
}
// 4) useSSE's shouldInvalidateTargetStates expression must reference the
// terminal events (COMPLETED and FAILED) and never CREATED or PROCESSING.
const ssePath = 'src/lib/query/hooks/useSSE.ts'
const sseText = readFile(ssePath)
// Capture the expression assigned to shouldInvalidateTargetStates, up to the
// next blank line.
const targetStatesInvalidateExprMatch = sseText.match(
  /const shouldInvalidateTargetStates\s*=\s*([\s\S]*?)\n\s*\n/,
)
if (!targetStatesInvalidateExprMatch) {
  fail('Unable to locate shouldInvalidateTargetStates expression', [ssePath])
}
const targetStatesInvalidateExpr = targetStatesInvalidateExprMatch[1]
if (!/TASK_EVENT_TYPE\.COMPLETED/.test(targetStatesInvalidateExpr) || !/TASK_EVENT_TYPE\.FAILED/.test(targetStatesInvalidateExpr)) {
  fail('useSSE must invalidate target states only for terminal events', [ssePath])
}
if (/TASK_EVENT_TYPE\.CREATED/.test(targetStatesInvalidateExpr)) {
  fail('useSSE target-state invalidation must not include CREATED', [ssePath])
}
if (/TASK_EVENT_TYPE\.PROCESSING/.test(targetStatesInvalidateExpr)) {
  fail('useSSE target-state invalidation must not include PROCESSING', [ssePath])
}
console.log('[task-target-states-no-polling-guard] OK')
@@ -0,0 +1,85 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Guard is expected to run from the repository root.
const root = process.cwd()
// Test suites subject to the behavior-quality rules enforced below.
const targetDirs = [
  path.join(root, 'tests', 'integration', 'api', 'contract'),
  path.join(root, 'tests', 'integration', 'provider'),
  path.join(root, 'tests', 'integration', 'chain'),
  path.join(root, 'tests', 'system'),
  path.join(root, 'tests', 'regression'),
]
// Print a titled failure report to stderr and exit non-zero so CI fails.
function fail(title, details = []) {
  console.error(`\n[test-behavior-quality-guard] ${title}`)
  for (const detail of details) {
    console.error(`  - ${detail}`)
  }
  process.exit(1)
}
// Depth-first collection of every `*.test.ts` file below `dir`, ignoring VCS
// metadata and installed dependencies. A missing directory yields [].
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (entry.name === '.git' || entry.name === 'node_modules') continue
    const full = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(full, out)
    } else if (entry.isFile() && entry.name.endsWith('.test.ts')) {
      out.push(full)
    }
  }
  return out
}
// Convert an absolute path to a repo-relative POSIX path for reporting.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// --- guard entry point -------------------------------------------------------
const files = targetDirs.flatMap((dir) => walk(dir))
if (files.length === 0) {
  fail('No target test files found', targetDirs.map((dir) => toRel(dir)))
}
const violations = []
for (const file of files) {
  const rel = toRel(file)
  const text = fs.readFileSync(file, 'utf8')
  // Behavior tests must not read production source text (src/app, src/lib)
  // to assert on it.
  const hasSourceRead = /(readFileSync|fs\.readFileSync)\s*\([\s\S]{0,240}src\/(app|lib)\//m.test(text)
  if (hasSourceRead) {
    violations.push(`${rel}: reading source code text is forbidden in behavior contract/chain tests`)
  }
  // String assertions on internal identifiers are structural, not behavioral,
  // and therefore banned; report the first matching pattern per file.
  const forbiddenStringContracts = [
    /toContain\(\s*['"]apiHandler['"]\s*\)/,
    /toContain\(\s*['"]submitTask['"]\s*\)/,
    /toContain\(\s*['"]maybeSubmitLLMTask['"]\s*\)/,
    /includes\(\s*['"]apiHandler['"]\s*\)/,
    /includes\(\s*['"]submitTask['"]\s*\)/,
    /includes\(\s*['"]maybeSubmitLLMTask['"]\s*\)/,
  ]
  for (const pattern of forbiddenStringContracts) {
    if (pattern.test(text)) {
      violations.push(`${rel}: forbidden structural string assertion matched ${pattern}`)
      break
    }
  }
  // Bare toHaveBeenCalled() is weak; require at least one argument-checking
  // toHaveBeenCalledWith() somewhere in the same file.
  const hasWeakCallAssertion = /toHaveBeenCalled\(\s*\)/.test(text)
  const hasStrongCallAssertion = /toHaveBeenCalledWith\(/.test(text)
  if (hasWeakCallAssertion && !hasStrongCallAssertion) {
    violations.push(`${rel}: has toHaveBeenCalled() without any toHaveBeenCalledWith() result assertions`)
  }
}
if (violations.length > 0) {
  fail('Behavior quality violations found', violations)
}
console.log(`[test-behavior-quality-guard] OK files=${files.length}`)
@@ -0,0 +1,54 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Guard is expected to run from the repository root.
const root = process.cwd()
// Contract files: the catalog lists routes; the matrix maps them to tests.
const catalogPath = path.join(root, 'tests', 'contracts', 'route-catalog.ts')
const matrixPath = path.join(root, 'tests', 'contracts', 'route-behavior-matrix.ts')
// Print a titled failure report to stderr and exit non-zero so CI fails.
function fail(title, details = []) {
  console.error(`\n[test-behavior-route-coverage-guard] ${title}`)
  for (const detail of details) {
    console.error(`  - ${detail}`)
  }
  process.exit(1)
}
// --- guard entry point -------------------------------------------------------
if (!fs.existsSync(catalogPath)) {
  fail('route catalog is missing', ['tests/contracts/route-catalog.ts'])
}
if (!fs.existsSync(matrixPath)) {
  fail('route behavior matrix is missing', ['tests/contracts/route-behavior-matrix.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const matrixText = fs.readFileSync(matrixPath, 'utf8')
// The matrix must be derived from the catalog so new routes cannot be
// silently omitted from behavior coverage.
if (!matrixText.includes('ROUTE_CATALOG.map')) {
  fail('route behavior matrix must derive entries from ROUTE_CATALOG.map')
}
// Count route entries inside the `const ROUTE_FILES = [...] as const` block.
const routeFilesBlockMatch = catalogText.match(/const ROUTE_FILES = \[([\s\S]*?)\] as const/)
if (!routeFilesBlockMatch) {
  fail('unable to parse ROUTE_FILES block from route catalog')
}
// NOTE: fail() exits the process above, so this ternary is defensive only.
const routeFilesBlock = routeFilesBlockMatch ? routeFilesBlockMatch[1] : ''
const routeCount = Array.from(routeFilesBlock.matchAll(/'src\/app\/api\/[^']+\/route\.ts'/g)).length
if (routeCount === 0) {
  fail('no routes detected in route catalog')
}
// Every behavior test file declared by the matrix must exist on disk.
const testFiles = Array.from(matrixText.matchAll(/'tests\/[a-zA-Z0-9_\-/.]+\.test\.ts'/g))
  .map((match) => match[0].slice(1, -1))
if (testFiles.length === 0) {
  fail('route behavior matrix does not declare any behavior test files')
}
const missingTests = Array.from(new Set(testFiles)).filter((file) => !fs.existsSync(path.join(root, file)))
if (missingTests.length > 0) {
  fail('route behavior matrix references missing test files', missingTests)
}
console.log(`[test-behavior-route-coverage-guard] OK routes=${routeCount} tests=${new Set(testFiles).size}`)
@@ -0,0 +1,49 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Guard is expected to run from the repository root.
const root = process.cwd()
// Contract files: the catalog lists task types; the matrix maps them to tests.
const catalogPath = path.join(root, 'tests', 'contracts', 'task-type-catalog.ts')
const matrixPath = path.join(root, 'tests', 'contracts', 'tasktype-behavior-matrix.ts')
// Print a titled failure report to stderr and exit non-zero so CI fails.
function fail(title, details = []) {
  console.error(`\n[test-behavior-tasktype-coverage-guard] ${title}`)
  for (const detail of details) {
    console.error(`  - ${detail}`)
  }
  process.exit(1)
}
if (!fs.existsSync(catalogPath)) {
fail('task type catalog is missing', ['tests/contracts/task-type-catalog.ts'])
}
if (!fs.existsSync(matrixPath)) {
fail('tasktype behavior matrix is missing', ['tests/contracts/tasktype-behavior-matrix.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const matrixText = fs.readFileSync(matrixPath, 'utf8')
if (!matrixText.includes('TASK_TYPE_CATALOG.map')) {
fail('tasktype behavior matrix must derive entries from TASK_TYPE_CATALOG.map')
}
const taskTypeCount = Array.from(catalogText.matchAll(/\[TASK_TYPE\.([A-Z_]+)\]/g)).length
if (taskTypeCount === 0) {
fail('no task types detected in task type catalog')
}
const testFiles = Array.from(matrixText.matchAll(/'tests\/[a-zA-Z0-9_\-/.]+\.test\.ts'/g))
.map((match) => match[0].slice(1, -1))
if (testFiles.length === 0) {
fail('tasktype behavior matrix does not declare any behavior test files')
}
const missingTests = Array.from(new Set(testFiles)).filter((file) => !fs.existsSync(path.join(root, file)))
if (missingTests.length > 0) {
fail('tasktype behavior matrix references missing test files', missingTests)
}
console.log(`[test-behavior-tasktype-coverage-guard] OK taskTypes=${taskTypeCount} tests=${new Set(testFiles).size}`)
@@ -0,0 +1,57 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Guard: tests/contracts/route-catalog.ts must list exactly the route.ts files
// that exist under src/app/api (no missing entries, no stale entries).
const root = process.cwd()
const apiDir = path.join(root, 'src', 'app', 'api')
const catalogPath = path.join(root, 'tests', 'contracts', 'route-catalog.ts')
// Print a titled error (plus optional detail lines) and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[test-route-coverage-guard] ${title}`)
  for (const detail of details) {
    console.error(`  - ${detail}`)
  }
  process.exit(1)
}
// Recursively collect the full path of every `route.ts` below `dir`.
// VCS/build/dependency directories are skipped; results accumulate in `out`,
// which is also the return value (missing `dir` yields it unchanged).
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const ignored = new Set(['.git', '.next', 'node_modules'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (ignored.has(entry.name)) continue
    const child = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(child, out)
    } else if (entry.name === 'route.ts') {
      out.push(child)
    }
  }
  return out
}
// Convert an absolute path to a root-relative path with '/' separators,
// so catalog entries compare equal across operating systems.
function toRel(fullPath) {
  const segments = path.relative(root, fullPath).split(path.sep)
  return segments.join('/')
}
// Guard body: the catalog must list exactly the route.ts files found on disk.
if (!fs.existsSync(catalogPath)) {
  fail('route-catalog.ts is missing', ['tests/contracts/route-catalog.ts'])
}
const actualRoutes = walk(apiDir).map(toRel).sort()
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const catalogRoutes = Array.from(catalogText.matchAll(/'src\/app\/api\/[^']+\/route\.ts'/g))
  .map((match) => match[0].slice(1, -1))
  .sort()
// Set lookups keep the two diffs O(n); Array.includes inside filter was O(n^2)
// and degrades as the route count grows.
const actualSet = new Set(actualRoutes)
const catalogSet = new Set(catalogRoutes)
const missingInCatalog = actualRoutes.filter((routeFile) => !catalogSet.has(routeFile))
const staleInCatalog = catalogRoutes.filter((routeFile) => !actualSet.has(routeFile))
if (missingInCatalog.length > 0) {
  fail('Missing routes in tests/contracts/route-catalog.ts', missingInCatalog)
}
if (staleInCatalog.length > 0) {
  fail('Stale route entries found in tests/contracts/route-catalog.ts', staleInCatalog)
}
console.log(`[test-route-coverage-guard] OK routes=${actualRoutes.length}`)
@@ -0,0 +1,46 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Guard: the TASK_TYPE keys in src/lib/task/types.ts and the owners declared in
// tests/contracts/task-type-catalog.ts must stay in one-to-one correspondence.
const root = process.cwd()
const taskTypesPath = path.join(root, 'src', 'lib', 'task', 'types.ts')
const catalogPath = path.join(root, 'tests', 'contracts', 'task-type-catalog.ts')
// Print a titled error (plus optional detail lines) and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[test-tasktype-coverage-guard] ${title}`)
  for (const detail of details) {
    console.error(`  - ${detail}`)
  }
  process.exit(1)
}
// Guard body: every TASK_TYPE key must have a catalog owner, and vice versa.
if (!fs.existsSync(taskTypesPath)) {
  fail('Task type source file is missing', ['src/lib/task/types.ts'])
}
if (!fs.existsSync(catalogPath)) {
  fail('Task type catalog file is missing', ['tests/contracts/task-type-catalog.ts'])
}
const taskTypesText = fs.readFileSync(taskTypesPath, 'utf8')
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const taskTypeBlockMatch = taskTypesText.match(/export const TASK_TYPE = \{([\s\S]*?)\n\} as const/)
if (!taskTypeBlockMatch) {
  fail('Unable to parse TASK_TYPE block from src/lib/task/types.ts')
}
const taskTypeBlock = taskTypeBlockMatch ? taskTypeBlockMatch[1] : ''
// `:\s*'` instead of `:\s'`: the old pattern required exactly one whitespace
// character after the colon, so a reformat of types.ts (aligned values, or no
// space) would silently drop keys and mis-report every catalog entry as stale.
const taskTypeKeys = Array.from(taskTypeBlock.matchAll(/^\s+([A-Z_]+):\s*'[^']+',?$/gm)).map((match) => match[1])
const catalogKeys = Array.from(catalogText.matchAll(/\[TASK_TYPE\.([A-Z_]+)\]/g)).map((match) => match[1])
// Set lookups keep both diffs linear in the number of task types.
const taskTypeKeySet = new Set(taskTypeKeys)
const catalogKeySet = new Set(catalogKeys)
const missingKeys = taskTypeKeys.filter((key) => !catalogKeySet.has(key))
const staleKeys = catalogKeys.filter((key) => !taskTypeKeySet.has(key))
if (missingKeys.length > 0) {
  fail('Missing TASK_TYPE owners in tests/contracts/task-type-catalog.ts', missingKeys)
}
if (staleKeys.length > 0) {
  fail('Stale TASK_TYPE keys in tests/contracts/task-type-catalog.ts', staleKeys)
}
console.log(`[test-tasktype-coverage-guard] OK taskTypes=${taskTypeKeys.length}`)
+127
View File
@@ -0,0 +1,127 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { createHash } from 'node:crypto'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import { prisma } from '@/lib/prisma'
import { MEDIA_MODEL_MAPPINGS } from './media-mapping'
// File snapshots for each run land under data/migration-backups/<runId>/.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
// Rows fetched per findMany page while cursor-paginating each model.
const BATCH_SIZE = 500
// Minimal structural view of a Prisma model delegate, looked up by name at runtime.
type DynamicModel = {
  findMany: (args: unknown) => Promise<Array<Record<string, unknown>>>
  createMany?: (args: unknown) => Promise<unknown>
}
// Allows resolving model delegates by the string names in MEDIA_MODEL_MAPPINGS.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
// Filesystem-safe timestamp: ISO-8601 with ':' and '.' replaced by '-'.
function nowStamp() {
  const iso = new Date().toISOString()
  return iso.replace(/:/g, '-').replace(/\./g, '-')
}
// Hex-encoded SHA-256 digest of the given string.
function checksum(value: string) {
  const hasher = createHash('sha256')
  hasher.update(value)
  return hasher.digest('hex')
}
// Build a Prisma `select` object: the id column plus every requested field.
function toSelect(fields: string[]) {
  return fields.reduce<Record<string, true>>(
    (acc, field) => {
      acc[field] = true
      return acc
    },
    { id: true },
  )
}
// Entry point: cursor-paginate every mapped model, collect each non-empty legacy
// media value, archive the rows to the legacyMediaRefBackup model (best effort),
// and always write a JSON snapshot + summary under data/migration-backups/<runId>/.
async function main() {
  const runId = nowStamp()
  const backupDir = path.join(BACKUP_ROOT, runId)
  await fs.mkdir(backupDir, { recursive: true })
  // One entry per (table, row, legacy field) with a sha256 of the stored value.
  const allRows: Array<{
    runId: string
    tableName: string
    rowId: string
    fieldName: string
    legacyValue: string
    checksum: string
  }> = []
  for (const mapping of MEDIA_MODEL_MAPPINGS) {
    const model = prismaDynamic[mapping.model]
    if (!model) continue
    const select = toSelect(mapping.fields.map((f) => f.legacyField))
    let cursor: string | null = null
    while (true) {
      const page = await model.findMany({
        select,
        // Cursor pagination: skip the cursor row itself on every page after the first.
        ...(cursor
          ? {
              cursor: { id: cursor },
              skip: 1,
            }
          : {}),
        orderBy: { id: 'asc' },
        take: BATCH_SIZE,
      })
      if (!page.length) break
      for (const row of page) {
        for (const field of mapping.fields) {
          const value = row[field.legacyField]
          // Only archive non-empty string values.
          if (typeof value !== 'string' || !value.trim()) continue
          allRows.push({
            runId,
            tableName: mapping.tableName,
            rowId: String(row.id),
            fieldName: field.legacyField,
            legacyValue: value,
            checksum: checksum(value),
          })
        }
      }
      cursor = String(page[page.length - 1].id)
    }
  }
  if (allRows.length > 0) {
    // Best-effort DB archive in chunks of 1000 rows; if the backup model is
    // missing we log and rely on the file snapshot written below instead.
    try {
      const backupModel = prismaDynamic.legacyMediaRefBackup
      if (!backupModel?.createMany) {
        throw new Error('Prisma model not found: legacyMediaRefBackup')
      }
      for (let i = 0; i < allRows.length; i += 1000) {
        const chunk = allRows.slice(i, i + 1000)
        await backupModel.createMany({ data: chunk })
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error)
      _ulogError('[media-archive-legacy-refs] db backup table unavailable, fallback to file snapshot only', message)
    }
  }
  const snapshotPath = path.join(backupDir, 'legacy-media-refs.json')
  await fs.writeFile(snapshotPath, JSON.stringify(allRows, null, 2), 'utf8')
  // Hash what was actually written to disk, not the in-memory serialization.
  const snapshotHash = checksum(await fs.readFile(snapshotPath, 'utf8'))
  const summary = {
    runId,
    createdAt: new Date().toISOString(),
    backupDir,
    archivedCount: allRows.length,
    snapshotFile: path.basename(snapshotPath),
    snapshotSha256: snapshotHash,
  }
  await fs.writeFile(path.join(backupDir, 'legacy-media-refs-summary.json'), JSON.stringify(summary, null, 2), 'utf8')
  _ulogInfo(`[media-archive-legacy-refs] runId=${runId}`)
  _ulogInfo(`[media-archive-legacy-refs] archived=${allRows.length}`)
  _ulogInfo(`[media-archive-legacy-refs] snapshot=${snapshotPath}`)
}
// Run, record failures via exit code, and always release the Prisma connection.
main()
  .catch((error) => {
    _ulogError('[media-archive-legacy-refs] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+122
View File
@@ -0,0 +1,122 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { resolveMediaRefFromLegacyValue } from '@/lib/media/service'
import { MEDIA_MODEL_MAPPINGS } from './media-mapping'
// Rows fetched per findMany page while cursor-paginating each model.
const BATCH_SIZE = 200
// Minimal structural view of a Prisma model delegate, looked up by name at runtime.
type DynamicModel = {
  findMany: (args: unknown) => Promise<Array<Record<string, unknown>>>
  update: (args: unknown) => Promise<unknown>
}
// Allows resolving model delegates by the string names in MEDIA_MODEL_MAPPINGS.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
// Prisma `select` map: the id column is always selected alongside the requested columns.
function toSelect(fields: string[]) {
  const select: Record<string, true> = { id: true }
  fields.forEach((field) => {
    select[field] = true
  })
  return select
}
// Backfill *MediaId columns for one mapped model: for every row whose media-id
// column is still empty but whose legacy URL column holds a value, resolve the
// legacy value to a media record and patch the row. Cursor-paginates by id.
// Returns per-model stats; reports the model as skipped when the add-only
// migration columns do not exist in the database yet.
async function backfillModel(mapping: (typeof MEDIA_MODEL_MAPPINGS)[number]) {
  const model = prismaDynamic[mapping.model]
  if (!model) {
    throw new Error(`Prisma model not found: ${mapping.model}`)
  }
  // Select both the legacy URL column and the target media-id column per field pair.
  const selectFields = mapping.fields.flatMap((f) => [f.legacyField, f.mediaIdField])
  const select = toSelect(selectFields)
  let cursor: string | null = null
  let scanned = 0
  let updated = 0
  try {
    while (true) {
      const rows = await model.findMany({
        select,
        // Cursor pagination: skip the cursor row itself on every page after the first.
        ...(cursor
          ? {
              cursor: { id: cursor },
              skip: 1,
            }
          : {}),
        orderBy: { id: 'asc' },
        take: BATCH_SIZE,
      })
      if (!rows.length) break
      for (const row of rows) {
        scanned += 1
        const patch: Record<string, string> = {}
        for (const field of mapping.fields) {
          const mediaId = row[field.mediaIdField]
          const legacyValue = row[field.legacyField]
          // Skip fields already backfilled, or with no usable legacy value.
          if (mediaId || typeof legacyValue !== 'string' || !legacyValue.trim()) {
            continue
          }
          const media = await resolveMediaRefFromLegacyValue(legacyValue)
          if (!media) continue
          patch[field.mediaIdField] = media.id
        }
        // Only issue an update when at least one field resolved.
        if (Object.keys(patch).length > 0) {
          await model.update({
            where: { id: String(row.id) },
            data: patch,
          })
          updated += 1
        }
      }
      cursor = String(rows[rows.length - 1].id)
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    // Tolerate schemas where the new columns have not been migrated in yet;
    // any other database error is fatal and propagates to the caller.
    if (message.includes('does not exist') || message.includes('Unknown column')) {
      _ulogError(
        `[media-backfill-refs] skip ${mapping.tableName}: migration columns not available yet`,
        message,
      )
      return { scanned: 0, updated: 0, skipped: true }
    }
    throw error
  }
  return { scanned, updated, skipped: false }
}
// Entry point: backfill every mapped model and log per-table plus total stats.
async function main() {
  const startedAt = new Date()
  _ulogInfo(`[media-backfill-refs] started at ${startedAt.toISOString()}`)
  let totalScanned = 0
  let totalUpdated = 0
  for (const mapping of MEDIA_MODEL_MAPPINGS) {
    const result = await backfillModel(mapping)
    totalScanned += result.scanned
    totalUpdated += result.updated
    if (result.skipped) {
      _ulogInfo(`[media-backfill-refs] ${mapping.tableName}: skipped (run add-only DB migration first)`)
    } else {
      _ulogInfo(
        `[media-backfill-refs] ${mapping.tableName}: scanned=${result.scanned} updatedRows=${result.updated}`,
      )
    }
  }
  _ulogInfo(
    `[media-backfill-refs] done scanned=${totalScanned} updatedRows=${totalUpdated} durationMs=${Date.now() - startedAt.getTime()}`,
  )
}
// Run, record failures via exit code, and always release the Prisma connection.
main()
  .catch((error) => {
    _ulogError('[media-backfill-refs] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+202
View File
@@ -0,0 +1,202 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import COS from 'cos-nodejs-sdk-v5'
import { prisma } from '@/lib/prisma'
import { resolveStorageKeyFromMediaValue } from '@/lib/media/service'
import { MEDIA_MODEL_MAPPINGS } from './media-mapping'
// One object in the storage backend (local upload dir or COS bucket).
type StorageEntry = {
  key: string
  sizeBytes: number
  lastModified: string | null
}
// Shape of one page returned by the COS getBucket listing API.
type CosBucketPage = {
  Contents?: Array<{ Key: string; Size?: string | number; LastModified?: string }>
  IsTruncated?: string | boolean
  NextMarker?: string
}
// Minimal structural view of a Prisma model delegate, looked up by name at runtime.
type DynamicModel = {
  findMany: (args: unknown) => Promise<Array<Record<string, unknown>>>
}
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
// Output index files land under data/migration-backups/<timestamp>/.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
// Filesystem-safe timestamp: ISO-8601 with ':' and '.' replaced by '-'.
function nowStamp() {
  return new Date().toISOString().replace(/[:.]/g, '-')
}
// Walk the local upload directory (UPLOAD_DIR, default ./data/uploads) with an
// explicit queue and return one entry per file, keyed by its '/'-joined relative
// path. Returns [] when the directory does not exist.
async function listLocalObjects(): Promise<StorageEntry[]> {
  const uploadDir = process.env.UPLOAD_DIR || './data/uploads'
  const rootDir = path.isAbsolute(uploadDir) ? uploadDir : path.join(process.cwd(), uploadDir)
  const exists = await fs.stat(rootDir).then(() => true).catch(() => false)
  if (!exists) return []
  const rows: StorageEntry[] = []
  const queue = ['']
  while (queue.length > 0) {
    const rel = queue.shift() as string
    const full = path.join(rootDir, rel)
    const entries = await fs.readdir(full, { withFileTypes: true })
    for (const entry of entries) {
      const childRel = path.join(rel, entry.name)
      if (entry.isDirectory()) {
        queue.push(childRel)
        continue
      }
      if (!entry.isFile()) continue
      const stat = await fs.stat(path.join(rootDir, childRel))
      rows.push({
        // Keys always use '/' separators regardless of host OS.
        key: childRel.split(path.sep).join('/'),
        sizeBytes: stat.size,
        lastModified: stat.mtime.toISOString(),
      })
    }
  }
  return rows
}
// Page through the whole COS bucket (1000 keys per page) and return every object.
// Throws when any of the required COS env vars is missing.
async function listCosObjects(): Promise<StorageEntry[]> {
  const secretId = process.env.COS_SECRET_ID
  const secretKey = process.env.COS_SECRET_KEY
  const bucket = process.env.COS_BUCKET
  const region = process.env.COS_REGION
  if (!secretId || !secretKey || !bucket || !region) {
    throw new Error('Missing COS env: COS_SECRET_ID/COS_SECRET_KEY/COS_BUCKET/COS_REGION')
  }
  const cos = new COS({ SecretId: secretId, SecretKey: secretKey, Timeout: 60_000 })
  const rows: StorageEntry[] = []
  let marker = ''
  while (true) {
    // The SDK is callback-based; wrap each page request in a promise.
    const page = await new Promise<CosBucketPage>((resolve, reject) => {
      cos.getBucket(
        {
          Bucket: bucket,
          Region: region,
          Marker: marker,
          MaxKeys: 1000,
        },
        (err, data) => (err ? reject(err) : resolve(data as unknown as CosBucketPage)),
      )
    })
    const contents = page.Contents || []
    for (const item of contents) {
      rows.push({
        key: item.Key,
        sizeBytes: Number(item.Size || 0),
        lastModified: item.LastModified || null,
      })
    }
    // IsTruncated may arrive as a boolean or as the string 'true'/'false'.
    const truncated = String(page.IsTruncated || 'false') === 'true'
    if (!truncated) break
    // Fall back to the last listed key when the API omits NextMarker.
    const nextMarker = typeof page.NextMarker === 'string' ? page.NextMarker : ''
    marker = nextMarker || (contents.length ? contents[contents.length - 1].Key : '')
    if (!marker) break
  }
  return rows
}
// Enumerate storage objects from the backend selected by STORAGE_TYPE
// ('local' walks the upload dir; anything else lists the COS bucket).
async function listStorageObjects() {
  const storageType = process.env.STORAGE_TYPE || 'cos'
  const rows = storageType === 'local' ? await listLocalObjects() : await listCosObjects()
  return { storageType, rows }
}
// Build the set of storage keys the database still references.
// Primary source: mediaObject.storageKey. If that model is unavailable the
// error is logged and only the legacy-column scan below contributes, which
// resolves every non-empty legacy URL value to a storage key.
async function buildReferencedKeySet() {
  const refs = new Set<string>()
  try {
    const mediaRows = await prismaDynamic.mediaObject.findMany({
      select: { storageKey: true },
    })
    for (const row of mediaRows) {
      if (typeof row.storageKey === 'string' && row.storageKey.trim()) refs.add(row.storageKey)
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    _ulogError('[media-build-unreferenced-index] media_objects unavailable, fallback to legacy field scan', message)
  }
  for (const mapping of MEDIA_MODEL_MAPPINGS) {
    const model = prismaDynamic[mapping.model]
    if (!model) continue
    // Select the id plus every legacy URL column of the mapping.
    const select: Record<string, true> = { id: true }
    for (const field of mapping.fields) select[field.legacyField] = true
    let cursor: string | null = null
    while (true) {
      const rows = await model.findMany({
        select,
        // Cursor pagination: skip the cursor row itself on every page after the first.
        ...(cursor
          ? {
              cursor: { id: cursor },
              skip: 1,
            }
          : {}),
        orderBy: { id: 'asc' },
        take: 500,
      })
      if (!rows.length) break
      for (const row of rows) {
        for (const field of mapping.fields) {
          const value = row[field.legacyField]
          if (typeof value !== 'string' || !value.trim()) continue
          const key = await resolveStorageKeyFromMediaValue(value)
          if (key) refs.add(key)
        }
      }
      cursor = String(rows[rows.length - 1].id)
    }
  }
  return refs
}
// Entry point: write an index of storage objects not referenced by any DB row
// to data/migration-backups/<timestamp>/unreferenced-storage-objects-index.json.
// Read-only with respect to both storage and the database.
async function main() {
  const stamp = nowStamp()
  const backupDir = path.join(BACKUP_ROOT, stamp)
  await fs.mkdir(backupDir, { recursive: true })
  const referenced = await buildReferencedKeySet()
  const storage = await listStorageObjects()
  const unreferenced = storage.rows.filter((row) => !referenced.has(row.key))
  const output = {
    createdAt: new Date().toISOString(),
    storageType: storage.storageType,
    totalStorageObjects: storage.rows.length,
    referencedKeyCount: referenced.size,
    unreferencedCount: unreferenced.length,
    objects: unreferenced,
  }
  const filePath = path.join(backupDir, 'unreferenced-storage-objects-index.json')
  await fs.writeFile(filePath, JSON.stringify(output, null, 2), 'utf8')
  _ulogInfo(`[media-build-unreferenced-index] storageType=${storage.storageType}`)
  _ulogInfo(`[media-build-unreferenced-index] total=${storage.rows.length} unreferenced=${unreferenced.length}`)
  _ulogInfo(`[media-build-unreferenced-index] output=${filePath}`)
}
// Run, record failures via exit code, and always release the Prisma connection.
main()
  .catch((error) => {
    _ulogError('[media-build-unreferenced-index] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+90
View File
@@ -0,0 +1,90 @@
// Pairing of a legacy URL column with its new media-id foreign-key column.
export type MediaFieldMapping = {
  legacyField: string
  mediaIdField: string
}
// One Prisma model whose legacy media columns are being migrated.
// `model` is the Prisma delegate name; `tableName` the underlying SQL table.
export type MediaModelMapping = {
  model: string
  tableName: string
  fields: MediaFieldMapping[]
}
// Every model/column pair touched by the media migration scripts (archive,
// backfill, unreferenced-index). NOTE(review): keep in sync with the Prisma
// schema — a column missing here is silently excluded from the migration.
export const MEDIA_MODEL_MAPPINGS: MediaModelMapping[] = [
  {
    model: 'characterAppearance',
    tableName: 'character_appearances',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'locationImage',
    tableName: 'location_images',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'novelPromotionCharacter',
    tableName: 'novel_promotion_characters',
    fields: [{ legacyField: 'customVoiceUrl', mediaIdField: 'customVoiceMediaId' }],
  },
  {
    model: 'novelPromotionEpisode',
    tableName: 'novel_promotion_episodes',
    fields: [{ legacyField: 'audioUrl', mediaIdField: 'audioMediaId' }],
  },
  {
    // Panels carry the most media columns: image, video, lip-sync, sketch, previous.
    model: 'novelPromotionPanel',
    tableName: 'novel_promotion_panels',
    fields: [
      { legacyField: 'imageUrl', mediaIdField: 'imageMediaId' },
      { legacyField: 'videoUrl', mediaIdField: 'videoMediaId' },
      { legacyField: 'lipSyncVideoUrl', mediaIdField: 'lipSyncVideoMediaId' },
      { legacyField: 'sketchImageUrl', mediaIdField: 'sketchImageMediaId' },
      { legacyField: 'previousImageUrl', mediaIdField: 'previousImageMediaId' },
    ],
  },
  {
    model: 'novelPromotionShot',
    tableName: 'novel_promotion_shots',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'supplementaryPanel',
    tableName: 'supplementary_panels',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'novelPromotionVoiceLine',
    tableName: 'novel_promotion_voice_lines',
    fields: [{ legacyField: 'audioUrl', mediaIdField: 'audioMediaId' }],
  },
  {
    model: 'voicePreset',
    tableName: 'voice_presets',
    fields: [{ legacyField: 'audioUrl', mediaIdField: 'audioMediaId' }],
  },
  {
    model: 'globalCharacter',
    tableName: 'global_characters',
    fields: [{ legacyField: 'customVoiceUrl', mediaIdField: 'customVoiceMediaId' }],
  },
  {
    model: 'globalCharacterAppearance',
    tableName: 'global_character_appearances',
    fields: [
      { legacyField: 'imageUrl', mediaIdField: 'imageMediaId' },
      { legacyField: 'previousImageUrl', mediaIdField: 'previousImageMediaId' },
    ],
  },
  {
    model: 'globalLocationImage',
    tableName: 'global_location_images',
    fields: [
      { legacyField: 'imageUrl', mediaIdField: 'imageMediaId' },
      { legacyField: 'previousImageUrl', mediaIdField: 'previousImageMediaId' },
    ],
  },
  {
    model: 'globalVoice',
    tableName: 'global_voices',
    fields: [{ legacyField: 'customVoiceUrl', mediaIdField: 'customVoiceMediaId' }],
  },
]
+111
View File
@@ -0,0 +1,111 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import { prisma } from '@/lib/prisma'
// Backup snapshots (written by the safety-backup script) live under this root.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
// Maps a table name to its row count.
type CountMap = Record<string, number>
// Locate the newest backup directory under BACKUP_ROOT that contains a
// metadata.json. Directory names are sortable timestamps, so a lexicographic
// sort yields chronological order. Throws when the root is missing or no
// valid backup directory exists.
async function findLatestBackupDir() {
  const rootExists = await fs.stat(BACKUP_ROOT).then(() => true).catch(() => false)
  if (!rootExists) {
    throw new Error(`Backup root not found: ${BACKUP_ROOT}`)
  }
  const dirs = (await fs.readdir(BACKUP_ROOT, { withFileTypes: true }))
    .filter((d) => d.isDirectory())
    .map((d) => d.name)
    .sort()
  const validDirs: string[] = []
  for (const dir of dirs) {
    const metadataPath = path.join(BACKUP_ROOT, dir, 'metadata.json')
    // Renamed from `exists`: the original shadowed the root-existence flag above,
    // which is error-prone during maintenance.
    const hasMetadata = await fs.stat(metadataPath).then(() => true).catch(() => false)
    if (hasMetadata) validDirs.push(dir)
  }
  if (!validDirs.length) {
    throw new Error(`No backup directories found in ${BACKUP_ROOT}`)
  }
  return path.join(BACKUP_ROOT, validDirs[validDirs.length - 1])
}
// Read the expected per-table row counts recorded in <backupDir>/metadata.json.
// Missing or absent `tableCounts` yields an empty map.
async function readExpectedCounts(backupDir: string): Promise<CountMap> {
  const raw = await fs.readFile(path.join(backupDir, 'metadata.json'), 'utf8')
  const metadata = JSON.parse(raw)
  return (metadata.tableCounts || {}) as CountMap
}
// Count rows in each audited table via raw SQL. COUNT(*) may come back as a
// BigInt or under a driver-specific column name, so the first column of the
// first row is coerced with Number(); non-finite results fall back to 0.
async function currentCounts(): Promise<CountMap> {
  const entries: Array<[string, string]> = [
    ['projects', 'projects'],
    ['novel_promotion_projects', 'novel_promotion_projects'],
    ['novel_promotion_episodes', 'novel_promotion_episodes'],
    ['novel_promotion_panels', 'novel_promotion_panels'],
    ['novel_promotion_voice_lines', 'novel_promotion_voice_lines'],
    ['global_characters', 'global_characters'],
    ['global_character_appearances', 'global_character_appearances'],
    ['global_locations', 'global_locations'],
    ['global_location_images', 'global_location_images'],
    ['global_voices', 'global_voices'],
    ['tasks', 'tasks'],
    ['task_events', 'task_events'],
  ]
  // All counts run concurrently. Table names come from the fixed list above,
  // never from user input, so interpolating into $queryRawUnsafe is safe here.
  const resolved = await Promise.all(entries.map(async ([name, tableName]) => {
    const rows = (await prisma.$queryRawUnsafe(
      `SELECT COUNT(*) AS c FROM \`${tableName}\``,
    )) as Array<Record<string, unknown>>
    const raw = rows[0] || {}
    const firstValue = Object.values(raw)[0]
    const count = Number(firstValue || 0)
    return [name, Number.isFinite(count) ? count : 0] as const
  }))
  const out: CountMap = {}
  for (const [name, count] of resolved) out[name] = count
  return out
}
// Log a tab-separated expected/actual/delta table covering the union of keys
// from both maps (missing keys count as 0). Returns true when at least one
// table count differs from the snapshot.
function printDiff(expected: CountMap, actual: CountMap) {
  const allKeys = new Set<string>(Object.keys(expected))
  for (const key of Object.keys(actual)) allKeys.add(key)
  const keys = Array.from(allKeys).sort()
  _ulogInfo('table\texpected\tactual\tdelta')
  let hasDiff = false
  for (const key of keys) {
    const want = expected[key] ?? 0
    const got = actual[key] ?? 0
    const delta = got - want
    if (delta !== 0) hasDiff = true
    const sign = delta >= 0 ? '+' : ''
    _ulogInfo(`${key}\t${want}\t${got}\t${sign}${delta}`)
  }
  return hasDiff
}
// Entry point: compare live table counts against the latest snapshot (or the
// one given via --backup=<dir>). Dry-run only — exit code 2 flags drift and
// nothing is ever written.
async function main() {
  const explicit = process.argv.find((arg) => arg.startsWith('--backup='))
  const backupDir = explicit ? path.resolve(explicit.split('=')[1]) : await findLatestBackupDir()
  _ulogInfo(`[media-restore-dry-run] backupDir=${backupDir}`)
  const expected = await readExpectedCounts(backupDir)
  const actual = await currentCounts()
  const hasDiff = printDiff(expected, actual)
  if (hasDiff) {
    _ulogInfo('[media-restore-dry-run] drift detected (dry-run only, no writes executed).')
    process.exitCode = 2
    return
  }
  _ulogInfo('[media-restore-dry-run] ok: counts match expected snapshot.')
}
// Run, record failures via exit code, and always release the Prisma connection.
main()
  .catch((error) => {
    _ulogError('[media-restore-dry-run] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+247
View File
@@ -0,0 +1,247 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { createHash } from 'node:crypto'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import COS from 'cos-nodejs-sdk-v5'
import { prisma } from '@/lib/prisma'
// One table to snapshot: `name` is the output file stem, `tableName` the SQL table.
type SnapshotTask = {
  name: string
  tableName: string
}
// One object in the storage backend; `hash` is a sha256 for local files and the
// (quote-stripped) ETag for COS objects, or null when unavailable.
type StorageIndexRow = {
  key: string
  hash: string | null
  sizeBytes: number
  lastModified: string | null
}
// Shape of one page returned by the COS getBucket listing API.
type CosBucketPage = {
  Contents?: Array<{
    Key: string
    ETag?: string
    Size?: string | number
    LastModified?: string
  }>
  IsTruncated?: string | boolean
  NextMarker?: string
}
// All backup runs are written under data/migration-backups/<timestamp>/.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
// Timestamp safe for directory names: ISO string with ':' and '.' swapped for '-'.
function nowStamp() {
  return new Date().toISOString().replace(/[.:]/g, '-')
}
// Pretty-print JSON; BigInt values (e.g. raw SQL counts) serialize as strings
// since JSON.stringify throws on BigInt by default.
function toJson(value: unknown) {
  const replacer = (_key: string, val: unknown) => {
    if (typeof val === 'bigint') return String(val)
    return val
  }
  return JSON.stringify(value, replacer, 2)
}
// Serialize `data` with toJson (BigInt-safe) and write it to `filePath` as UTF-8.
async function writeJson(filePath: string, data: unknown) {
  await fs.writeFile(filePath, toJson(data), 'utf8')
}
// Hex SHA-256 of a string (used to fingerprint the metadata content).
function sha256Text(input: string) {
  const hash = createHash('sha256')
  return hash.update(input).digest('hex')
}
// Map a Prisma DATABASE_URL to a local file path. Only `file:` URLs resolve
// (relative paths resolve against the working directory); any other scheme,
// an empty path, or a missing URL yields null.
function resolveDatabaseFilePath(databaseUrl: string | undefined): string | null {
  if (!databaseUrl || !databaseUrl.startsWith('file:')) return null
  const raw = databaseUrl.slice('file:'.length)
  if (!raw) return null
  if (path.isAbsolute(raw)) return raw
  return path.join(process.cwd(), raw)
}
// Depth-first walk of `rootDir`, hashing every file (sha256 of full contents)
// and returning '/'-keyed index rows. NOTE(review): reads each file fully into
// memory; revisit if very large upload objects are expected.
async function listLocalFilesRecursively(rootDir: string, prefix = ''): Promise<StorageIndexRow[]> {
  const fullDir = path.join(rootDir, prefix)
  const entries = await fs.readdir(fullDir, { withFileTypes: true })
  const out: StorageIndexRow[] = []
  for (const entry of entries) {
    const rel = path.join(prefix, entry.name)
    if (entry.isDirectory()) {
      out.push(...(await listLocalFilesRecursively(rootDir, rel)))
      continue
    }
    if (!entry.isFile()) continue
    const filePath = path.join(rootDir, rel)
    const stat = await fs.stat(filePath)
    const buf = await fs.readFile(filePath)
    out.push({
      // Keys always use '/' separators regardless of host OS.
      key: rel.split(path.sep).join('/'),
      hash: createHash('sha256').update(buf).digest('hex'),
      sizeBytes: stat.size,
      lastModified: stat.mtime.toISOString(),
    })
  }
  return out
}
// Page through the entire COS bucket (1000 keys per page); the ETag (quotes
// stripped) is recorded as the object hash. Throws when COS env vars are missing.
async function listCosObjects(): Promise<StorageIndexRow[]> {
  const secretId = process.env.COS_SECRET_ID
  const secretKey = process.env.COS_SECRET_KEY
  const bucket = process.env.COS_BUCKET
  const region = process.env.COS_REGION
  if (!secretId || !secretKey || !bucket || !region) {
    throw new Error('Missing COS env: COS_SECRET_ID/COS_SECRET_KEY/COS_BUCKET/COS_REGION')
  }
  const cos = new COS({ SecretId: secretId, SecretKey: secretKey, Timeout: 60_000 })
  const out: StorageIndexRow[] = []
  let marker = ''
  while (true) {
    // The SDK is callback-based; wrap each page request in a promise.
    const page = await new Promise<CosBucketPage>((resolve, reject) => {
      cos.getBucket(
        {
          Bucket: bucket,
          Region: region,
          Marker: marker,
          MaxKeys: 1000,
        },
        (err, data) => (err ? reject(err) : resolve((data || {}) as CosBucketPage)),
      )
    })
    const contents = page.Contents || []
    for (const item of contents) {
      out.push({
        key: item.Key,
        hash: item.ETag ? String(item.ETag).replaceAll('"', '') : null,
        sizeBytes: Number(item.Size || 0),
        lastModified: item.LastModified || null,
      })
    }
    // IsTruncated may arrive as a boolean or as the string 'true'/'false'.
    const truncated = String(page.IsTruncated || 'false') === 'true'
    if (!truncated) break
    // Fall back to the last listed key when the API omits NextMarker.
    marker = page.NextMarker || (contents.length ? contents[contents.length - 1].Key : '')
    if (!marker) break
  }
  return out
}
// Pick the storage backend from STORAGE_TYPE ('local' walks UPLOAD_DIR, default
// ./data/uploads; anything else lists the COS bucket) and return its full index.
async function buildStorageIndex(): Promise<{ storageType: string; rows: StorageIndexRow[] }> {
  const storageType = process.env.STORAGE_TYPE || 'cos'
  if (storageType === 'local') {
    const uploadDir = process.env.UPLOAD_DIR || './data/uploads'
    const rootDir = path.isAbsolute(uploadDir) ? uploadDir : path.join(process.cwd(), uploadDir)
    const exists = await fs.stat(rootDir).then(() => true).catch(() => false)
    if (!exists) {
      return { storageType, rows: [] }
    }
    const rows = await listLocalFilesRecursively(rootDir)
    return { storageType, rows }
  }
  const rows = await listCosObjects()
  return { storageType, rows }
}
// Dump every audited table to <backupDir>/<name>.json and return row counts.
// NOTE(review): loads each table fully into memory; acceptable for a one-off
// migration backup, revisit for very large tables.
async function snapshotTables(backupDir: string) {
  const tasks: SnapshotTask[] = [
    { name: 'projects', tableName: 'projects' },
    { name: 'novel_promotion_projects', tableName: 'novel_promotion_projects' },
    { name: 'novel_promotion_episodes', tableName: 'novel_promotion_episodes' },
    { name: 'novel_promotion_panels', tableName: 'novel_promotion_panels' },
    { name: 'novel_promotion_voice_lines', tableName: 'novel_promotion_voice_lines' },
    { name: 'global_characters', tableName: 'global_characters' },
    { name: 'global_character_appearances', tableName: 'global_character_appearances' },
    { name: 'global_locations', tableName: 'global_locations' },
    { name: 'global_location_images', tableName: 'global_location_images' },
    { name: 'global_voices', tableName: 'global_voices' },
    { name: 'tasks', tableName: 'tasks' },
    { name: 'task_events', tableName: 'task_events' },
  ]
  const counts: Record<string, number> = {}
  for (const task of tasks) {
    // Table names come from the fixed list above, never from user input,
    // so interpolating into $queryRawUnsafe is safe here.
    const rows = (await prisma.$queryRawUnsafe(`SELECT * FROM \`${task.tableName}\``)) as unknown[]
    counts[task.name] = rows.length
    await writeJson(path.join(backupDir, `${task.name}.json`), rows)
  }
  return counts
}
// Write checksums.json mapping every file already present in backupDir to its sha256.
async function writeChecksums(backupDir: string) {
  const files = (await fs.readdir(backupDir)).sort()
  const sums: Record<string, string> = {}
  for (const file of files) {
    const filePath = path.join(backupDir, file)
    const stat = await fs.stat(filePath)
    if (!stat.isFile()) continue
    const buf = await fs.readFile(filePath)
    sums[file] = createHash('sha256').update(buf).digest('hex')
  }
  await writeJson(path.join(backupDir, 'checksums.json'), sums)
}
// Copy the database file into backupDir when DATABASE_URL is a `file:` URL.
// Returns the copied file name, or null when the DB is not a local file.
async function backupDbFile(backupDir: string) {
  const dbFile = resolveDatabaseFilePath(process.env.DATABASE_URL)
  if (!dbFile) return null
  const stat = await fs.stat(dbFile).catch(() => null)
  if (!stat || !stat.isFile()) return null
  const fileName = path.basename(dbFile)
  const target = path.join(backupDir, `db-file-${fileName}`)
  await fs.copyFile(dbFile, target)
  return path.basename(target)
}
// Entry point: create a timestamped backup dir holding the DB file copy, table
// dumps, the storage index, per-file checksums, and a metadata.json of the run.
async function main() {
  const stamp = nowStamp()
  const backupDir = path.join(BACKUP_ROOT, stamp)
  await fs.mkdir(backupDir, { recursive: true })
  const meta: Record<string, unknown> = {
    createdAt: new Date().toISOString(),
    backupDir,
    databaseUrl: process.env.DATABASE_URL || null,
    storageType: process.env.STORAGE_TYPE || 'cos',
    nodeEnv: process.env.NODE_ENV || null,
  }
  const copiedDbFile = await backupDbFile(backupDir)
  meta.copiedDbFile = copiedDbFile
  const tableCounts = await snapshotTables(backupDir)
  meta.tableCounts = tableCounts
  const storage = await buildStorageIndex()
  meta.storageType = storage.storageType
  meta.storageObjectCount = storage.rows.length
  await writeJson(path.join(backupDir, 'storage-object-index.json'), storage.rows)
  // checksums.json covers everything written so far; metadata.json is written after.
  await writeChecksums(backupDir)
  meta.metadataChecksum = sha256Text(toJson(meta))
  await writeJson(path.join(backupDir, 'metadata.json'), meta)
  _ulogInfo(`[media-safety-backup] done: ${backupDir}`)
  _ulogInfo(`[media-safety-backup] tableCounts=${JSON.stringify(tableCounts)}`)
  _ulogInfo(`[media-safety-backup] storageObjects=${storage.rows.length}`)
}
// Run, record failures via exit code, and always release the Prisma connection.
main()
  .catch((error) => {
    _ulogError('[media-safety-backup] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+72
View File
@@ -0,0 +1,72 @@
import { prisma } from '@/lib/prisma'
const OLD_STATUS = 'cancelled'
const NEW_STATUS = 'failed'
const OLD_EVENT_TYPE = 'task.cancelled'
const NEW_EVENT_TYPE = 'task.failed'
const MIGRATION_ERROR_CODE = 'USER_CANCELLED'
const MIGRATION_ERROR_MESSAGE = '用户已停止任务。'
function log(message: string) {
process.stdout.write(`${message}\n`)
}
function logError(message: string) {
process.stderr.write(`${message}\n`)
}
/**
 * Migrate legacy 'cancelled' tasks and events to the 'failed' model.
 * Two-phase task update: tasks with an empty errorMessage get the default
 * message stamped in; the rest keep their message and only gain the code.
 * NOTE(review): the updateMany calls are not wrapped in a transaction — a
 * crash between them leaves a partial migration, but re-running is safe
 * because each phase matches on OLD_STATUS.
 */
async function main() {
  const totalTasks = await prisma.task.count({
    where: { status: OLD_STATUS },
  })
  const totalEvents = await prisma.taskEvent.count({
    where: { eventType: OLD_EVENT_TYPE },
  })
  log(`[migrate-cancelled-to-failed] matched tasks: ${totalTasks}`)
  log(`[migrate-cancelled-to-failed] matched events: ${totalEvents}`)
  if (totalTasks === 0 && totalEvents === 0) {
    log('[migrate-cancelled-to-failed] no rows to migrate')
    return
  }
  // Phase 1: tasks without any error message — set status, code AND message.
  const taskEmptyMessageResult = await prisma.task.updateMany({
    where: {
      status: OLD_STATUS,
      OR: [{ errorMessage: null }, { errorMessage: '' }],
    },
    data: {
      status: NEW_STATUS,
      errorCode: MIGRATION_ERROR_CODE,
      errorMessage: MIGRATION_ERROR_MESSAGE,
    },
  })
  // Phase 2: remaining 'cancelled' tasks. Phase 1 already flipped its rows
  // to NEW_STATUS, so this only matches tasks with a non-empty message,
  // which is preserved (only the code is added).
  const taskResult = await prisma.task.updateMany({
    where: { status: OLD_STATUS },
    data: {
      status: NEW_STATUS,
      errorCode: MIGRATION_ERROR_CODE,
    },
  })
  const eventResult = await prisma.taskEvent.updateMany({
    where: { eventType: OLD_EVENT_TYPE },
    data: {
      eventType: NEW_EVENT_TYPE,
    },
  })
  log(`[migrate-cancelled-to-failed] updated tasks (empty message): ${taskEmptyMessageResult.count}`)
  log(`[migrate-cancelled-to-failed] updated tasks (remaining): ${taskResult.count}`)
  log(`[migrate-cancelled-to-failed] updated events: ${eventResult.count}`)
}
// Entry point wiring: report failure via exit code and always disconnect.
main()
  .catch((error) => {
    logError(`[migrate-cancelled-to-failed] failed: ${error instanceof Error ? error.stack || error.message : String(error)}`)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+231
View File
@@ -0,0 +1,231 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { encodeImageUrls } from '@/lib/contracts/image-urls-contract'
// Shape of the two JSON-string columns this script normalizes.
type AppearanceRow = {
  id: string
  imageUrls: string | null
  previousImageUrls: string | null
}
// Minimal structural view of a Prisma model delegate, so the same loop can
// run against several models selected by name at runtime.
type DynamicModel = {
  findMany: (args: unknown) => Promise<AppearanceRow[]>
  update: (args: unknown) => Promise<unknown>
}
type FieldName = 'imageUrls' | 'previousImageUrls'
// Outcome of normalizing one column: canonical encoding, whether it differs
// from the stored value, and why.
type NormalizeResult = {
  next: string
  changed: boolean
  reason: 'ok' | 'null' | 'invalid_json' | 'not_array' | 'filtered_non_string' | 'normalized_json'
}
type ModelStats = {
  scanned: number
  updatedRows: number
  changedFields: number
  reasons: Record<string, number>
}
// Rows fetched per keyset-pagination page.
const BATCH_SIZE = 200
// Without --apply the script is a dry run: it reports but never writes.
const APPLY = process.argv.includes('--apply')
const MODELS: Array<{ name: string; model: string }> = [
  { name: 'CharacterAppearance', model: 'characterAppearance' },
  { name: 'GlobalCharacterAppearance', model: 'globalCharacterAppearance' },
]
// Untyped view of the Prisma client for name-based model lookup.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
// Emit one line to stdout.
function print(message: string) {
  process.stdout.write(message + '\n')
}
/**
 * Normalize one JSON-encoded image-URL list column.
 * Returns the canonical encoding plus whether/why it differs from `raw`.
 */
function normalizeField(raw: string | null): NormalizeResult {
  // Helper: rebuild the column as an empty canonical list.
  const reset = (reason: NormalizeResult['reason']): NormalizeResult => ({
    next: encodeImageUrls([]),
    changed: true,
    reason,
  })
  if (raw === null) return reset('null')
  try {
    const parsed = JSON.parse(raw) as unknown
    if (!Array.isArray(parsed)) return reset('not_array')
    const stringOnly = parsed.filter((item): item is string => typeof item === 'string')
    const next = encodeImageUrls(stringOnly)
    if (stringOnly.length !== parsed.length) {
      return { next, changed: true, reason: 'filtered_non_string' }
    }
    if (next !== raw) {
      return { next, changed: true, reason: 'normalized_json' }
    }
    return { next, changed: false, reason: 'ok' }
  } catch {
    return reset('invalid_json')
  }
}
/**
 * Normalize imageUrls/previousImageUrls for one model. Pages through all
 * rows with keyset pagination, normalizes both fields, and (only with
 * --apply) writes back rows whose canonical encoding differs. Collects
 * per-reason counters and up to 20 sample diffs for the report.
 */
async function migrateModel(modelName: string, modelKey: string) {
  const model = prismaDynamic[modelKey]
  if (!model) {
    throw new Error(`Prisma model not found: ${modelKey}`)
  }
  const stats: ModelStats = {
    scanned: 0,
    updatedRows: 0,
    changedFields: 0,
    reasons: {
      ok: 0,
      null: 0,
      invalid_json: 0,
      not_array: 0,
      filtered_non_string: 0,
      normalized_json: 0,
    },
  }
  const samples: Array<{ id: string; field: FieldName; reason: NormalizeResult['reason']; before: string | null; after: string }> = []
  // Keyset pagination: `cursor` is the last row id of the previous page.
  let cursor: string | null = null
  while (true) {
    const rows = await model.findMany({
      select: {
        id: true,
        imageUrls: true,
        previousImageUrls: true,
      },
      ...(cursor
        ? {
            cursor: { id: cursor },
            skip: 1,
          }
        : {}),
      orderBy: { id: 'asc' },
      take: BATCH_SIZE,
    })
    if (rows.length === 0) break
    for (const row of rows) {
      stats.scanned += 1
      const imageUrlsResult = normalizeField(row.imageUrls)
      const previousImageUrlsResult = normalizeField(row.previousImageUrls)
      // Reasons are counted per field, so totals are 2x the row count.
      stats.reasons[imageUrlsResult.reason] += 1
      stats.reasons[previousImageUrlsResult.reason] += 1
      const data: Partial<Record<FieldName, string>> = {}
      if (imageUrlsResult.changed) {
        data.imageUrls = imageUrlsResult.next
        stats.changedFields += 1
        if (samples.length < 20) {
          samples.push({
            id: row.id,
            field: 'imageUrls',
            reason: imageUrlsResult.reason,
            before: row.imageUrls,
            after: imageUrlsResult.next,
          })
        }
      }
      if (previousImageUrlsResult.changed) {
        data.previousImageUrls = previousImageUrlsResult.next
        stats.changedFields += 1
        if (samples.length < 20) {
          samples.push({
            id: row.id,
            field: 'previousImageUrls',
            reason: previousImageUrlsResult.reason,
            before: row.previousImageUrls,
            after: previousImageUrlsResult.next,
          })
        }
      }
      if (Object.keys(data).length > 0) {
        stats.updatedRows += 1
        // Dry runs count rows; only --apply actually writes.
        if (APPLY) {
          await model.update({
            where: { id: row.id },
            data,
          })
        }
      }
    }
    cursor = rows[rows.length - 1]?.id || null
  }
  const summary = `[migrate-image-urls-contract] ${modelName}: scanned=${stats.scanned} updatedRows=${stats.updatedRows} changedFields=${stats.changedFields}`
  _ulogInfo(summary)
  print(summary)
  print(`[migrate-image-urls-contract] ${modelName}: reasons=${JSON.stringify(stats.reasons)}`)
  if (samples.length > 0) {
    print(`[migrate-image-urls-contract] ${modelName}: sampleChanges=${JSON.stringify(samples, null, 2)}`)
  }
  return stats
}
/** Entry point: run the normalization over each configured model. */
async function main() {
  print(`[migrate-image-urls-contract] mode=${APPLY ? 'apply' : 'dry-run'}`)
  const totals = {
    scanned: 0,
    updatedRows: 0,
    changedFields: 0,
  }
  for (const target of MODELS) {
    const stats = await migrateModel(target.name, target.model)
    totals.scanned += stats.scanned
    totals.updatedRows += stats.updatedRows
    totals.changedFields += stats.changedFields
  }
  print(`[migrate-image-urls-contract] done scanned=${totals.scanned} updatedRows=${totals.updatedRows} changedFields=${totals.changedFields} mode=${APPLY ? 'apply' : 'dry-run'}`)
}
// Entry point wiring: report failure via exit code and always disconnect.
main()
  .catch((error) => {
    _ulogError('[migrate-image-urls-contract] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+217
View File
@@ -0,0 +1,217 @@
#!/usr/bin/env npx tsx
/**
* 本地存储 → MinIO 迁移脚本
* 使用 @aws-sdk/client-s3(项目已有依赖)
*
* 用法: npx tsx scripts/migrate-local-to-minio.ts
*/
import { S3Client, PutObjectCommand, HeadObjectCommand } from '@aws-sdk/client-s3'
import * as fs from 'fs/promises'
import * as path from 'path'
import { createReadStream } from 'fs'
// ==================== 配置 ====================
// All knobs come from the environment with local-dev-friendly defaults.
const LOCAL_DIR = process.env.LOCAL_UPLOAD_DIR || './data/uploads'
const MINIO_ENDPOINT = process.env.MINIO_ENDPOINT || 'http://127.0.0.1:19000'
const MINIO_BUCKET = process.env.MINIO_BUCKET || 'waoowaoo'
const MINIO_REGION = process.env.MINIO_REGION || 'us-east-1'
const MINIO_ACCESS_KEY = process.env.MINIO_ACCESS_KEY || 'minioadmin'
const MINIO_SECRET_KEY = process.env.MINIO_SECRET_KEY || 'minioadmin'
// NOTE(review): parseInt without an explicit radix — fine for plain decimal
// env values, but a "0x…" value would parse as hex.
const CONCURRENCY = parseInt(process.env.MIGRATE_CONCURRENCY || '10')
// When true, uploads are logged only, never performed.
const DRY_RUN = process.env.MIGRATE_DRY_RUN === 'true'
// ==================== S3 client ====================
// forcePathStyle is required for MinIO (bucket in the path, not the host).
const s3 = new S3Client({
  endpoint: MINIO_ENDPOINT,
  region: MINIO_REGION,
  forcePathStyle: true,
  credentials: {
    accessKeyId: MINIO_ACCESS_KEY,
    secretAccessKey: MINIO_SECRET_KEY,
  },
})
// ==================== 工具函数 ====================
/**
 * Map a filename extension to its MIME type; anything unrecognized
 * falls back to application/octet-stream.
 */
function guessContentType(filename: string): string {
  const byExtension: Record<string, string> = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.webp': 'image/webp',
    '.mp4': 'video/mp4',
    '.webm': 'video/webm',
    '.mp3': 'audio/mpeg',
    '.wav': 'audio/wav',
    '.ogg': 'audio/ogg',
    '.json': 'application/json',
    '.txt': 'text/plain',
  }
  const extension = path.extname(filename).toLowerCase()
  return byExtension[extension] ?? 'application/octet-stream'
}
/** Human-readable byte count (B/KB/MB/GB, base 1024, at most 2 decimals). */
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B'
  const units = ['B', 'KB', 'MB', 'GB']
  const exponent = Math.floor(Math.log(bytes) / Math.log(1024))
  const scaled = parseFloat((bytes / 1024 ** exponent).toFixed(2))
  return `${scaled} ${units[exponent]}`
}
// ==================== 扫描本地文件 ====================
/**
 * Recursively walk `dir` and list files as upload candidates. `key` is the
 * path relative to the scan root with forward slashes — the future object
 * key. Hidden (dot-prefixed) files are skipped; unreadable directories are
 * logged and skipped rather than fatal.
 */
async function scanLocalFiles(dir: string, basePath = ''): Promise<Array<{ localPath: string; key: string; size: number }>> {
  const files: Array<{ localPath: string; key: string; size: number }> = []
  try {
    const entries = await fs.readdir(dir, { withFileTypes: true })
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name)
      const relativePath = path.join(basePath, entry.name)
      if (entry.isDirectory()) {
        files.push(...await scanLocalFiles(fullPath, relativePath))
      } else {
        // Skip hidden files
        if (entry.name.startsWith('.')) continue
        const stats = await fs.stat(fullPath)
        files.push({
          localPath: fullPath,
          // Normalize Windows separators so keys are always posix-style.
          key: relativePath.replace(/\\/g, '/'),
          size: stats.size,
        })
      }
    }
  } catch (err: unknown) {
    console.error(`  ⚠️ 无法读取目录: ${dir}`, (err as Error).message)
  }
  return files
}
// ==================== 检查文件是否已存在 ====================
/** True when `key` is already present in the bucket (HEAD succeeds). */
async function objectExists(key: string): Promise<boolean> {
  try {
    await s3.send(new HeadObjectCommand({ Bucket: MINIO_BUCKET, Key: key }))
  } catch {
    return false
  }
  return true
}
// ==================== 上传文件 ====================
/**
 * Upload one local file to MinIO, skipping keys that already exist (makes
 * re-runs idempotent). Returns 'success' | 'skipped' | 'error'; dry-run
 * uploads also report 'skipped' so they are never counted as successes.
 */
async function uploadFile(file: { localPath: string; key: string; size: number }): Promise<'success' | 'skipped' | 'error'> {
  // Skip objects already present in the bucket.
  if (await objectExists(file.key)) {
    return 'skipped'
  }
  if (DRY_RUN) {
    console.log(`  [DRY RUN] 将上传: ${file.key} (${formatBytes(file.size)})`)
    return 'skipped'
  }
  try {
    // NOTE(review): reads the whole file into memory; fine for typical
    // upload sizes but a stream would be safer for very large files.
    const body = await fs.readFile(file.localPath)
    await s3.send(new PutObjectCommand({
      Bucket: MINIO_BUCKET,
      Key: file.key,
      Body: body,
      ContentType: guessContentType(file.key),
    }))
    return 'success'
  } catch (err: unknown) {
    console.error(`  ✗ 上传失败: ${file.key}`, (err as Error).message)
    return 'error'
  }
}
// ==================== 并行控制 ====================
async function runBatched<T>(items: T[], concurrency: number, fn: (item: T) => Promise<void>) {
for (let i = 0; i < items.length; i += concurrency) {
const batch = items.slice(i, i + concurrency)
await Promise.all(batch.map(fn))
}
}
// ==================== 主流程 ====================
/**
 * Entry point: scan the local upload directory, upload everything to MinIO
 * via the S3 API (already-present keys are skipped), then print a summary
 * box. Exits non-zero when any upload failed so the run can simply be
 * repeated — successful uploads are skipped next time.
 *
 * Fix over the original: the periodic progress line concatenated the three
 * counters with no separator ("✓312" could mean 3/1/2 or 31/2/…); each
 * counter now carries its own marker.
 */
async function main() {
  console.log()
  console.log('╔══════════════════════════════════════════════════════╗')
  console.log('║ Local Storage → MinIO Migration Tool ║')
  console.log('╚══════════════════════════════════════════════════════╝')
  console.log()
  console.log(` 📂 源目录: ${path.resolve(LOCAL_DIR)}`)
  console.log(` 🪣 目标桶: ${MINIO_ENDPOINT}/${MINIO_BUCKET}`)
  console.log(` ⚡ 并发数: ${CONCURRENCY}`)
  console.log(` 🔍 干运行: ${DRY_RUN}`)
  console.log()
  // 1. Scan local files
  console.log('📦 扫描本地文件...')
  const files = await scanLocalFiles(LOCAL_DIR)
  if (files.length === 0) {
    console.log(' 没有需要迁移的文件')
    return
  }
  const totalSize = files.reduce((sum, f) => sum + f.size, 0)
  console.log(` 找到 ${files.length} 个文件, 总大小: ${formatBytes(totalSize)}`)
  console.log()
  // 2. Upload
  console.log('🚀 开始迁移...')
  const startTime = Date.now()
  let success = 0
  let skipped = 0
  let failed = 0
  let processed = 0
  await runBatched(files, CONCURRENCY, async (file) => {
    const result = await uploadFile(file)
    processed++
    if (result === 'success') {
      success++
      // Log the first few uploads, then every 50th, to keep output short.
      if (success % 50 === 0 || success <= 5) {
        console.log(` ✓ [${processed}/${files.length}] ${file.key} (${formatBytes(file.size)})`)
      }
    } else if (result === 'skipped') {
      skipped++
    } else {
      failed++
    }
    if (processed % 100 === 0) {
      const pct = ((processed / files.length) * 100).toFixed(1)
      // Separate markers per counter (was an ambiguous digit run).
      console.log(` 📊 进度: ${pct}% (${processed}/${files.length}) | ✓${success} ⊘${skipped} ✗${failed}`)
    }
  })
  // 3. Summary
  const duration = ((Date.now() - startTime) / 1000).toFixed(1)
  console.log()
  console.log('╔══════════════════════════════════════════════════════╗')
  console.log('║ 迁移完成 ║')
  console.log('╠══════════════════════════════════════════════════════╣')
  console.log(`║ 总文件: ${String(files.length).padEnd(40)}`)
  console.log(`║ 成功: ${String(success).padEnd(40)}`)
  console.log(`║ 跳过: ${String(skipped).padEnd(40)}`)
  console.log(`║ 失败: ${String(failed).padEnd(40)}`)
  console.log(`║ 耗时: ${String(duration + 's').padEnd(40)}`)
  console.log(`║ 大小: ${formatBytes(totalSize).padEnd(40)}`)
  console.log('╚══════════════════════════════════════════════════════╝')
  if (failed > 0) {
    console.log()
    console.log('⚠️ 有文件上传失败,请重新运行脚本(已上传的会自动跳过)')
    process.exit(1)
  }
}
// Entry point: any unhandled error aborts with a non-zero exit code.
main().catch(err => {
  console.error('迁移失败:', err)
  process.exit(1)
})
+100
View File
@@ -0,0 +1,100 @@
#!/bin/bash
#
# Storage migration helper: preview then run the local → MinIO migration.
# Usage: ./scripts/migrate-to-minio.sh [options]
#
# Fixes over the original:
#   - the MinIO health-check redirect went to the non-existent /devdev/null
#   - the trailing `[ $? -eq 0 ]` failure branch was unreachable under
#     `set -e` (a failing npx would exit the script first)
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
cd "$PROJECT_DIR"

# Colored output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

echo -e "${GREEN}══════════════════════════════════════════════════════════${NC}"
echo -e "${GREEN} Local Storage → MinIO Migration Tool${NC}"
echo -e "${GREEN}══════════════════════════════════════════════════════════${NC}"
echo

# Check whether MinIO is reachable; offer to start it via docker compose.
if ! curl -sf http://127.0.0.1:19000/minio/health/live >/dev/null 2>&1; then
    echo -e "${YELLOW}⚠ MinIO 未检测到在 127.0.0.1:19000${NC}"
    echo " 请先启动 MinIO: docker compose up -d minio"
    echo
    read -p "是否尝试自动启动 MinIO? [Y/n] " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Nn]$ ]]; then
        docker compose up -d minio
        echo -e "${GREEN}✓ MinIO 启动中,等待 5 秒...${NC}"
        sleep 5
    else
        exit 1
    fi
fi

echo -e "${GREEN}✓ MinIO 服务正常${NC}"
echo

# Nothing to do when the local data directory is absent or empty.
if [ ! -d "./data/uploads" ]; then
    echo -e "${YELLOW}⚠ 本地数据目录 ./data/uploads 不存在${NC}"
    echo " 无需迁移"
    exit 0
fi

FILE_COUNT=$(find ./data/uploads -type f 2>/dev/null | wc -l)
if [ "$FILE_COUNT" -eq 0 ]; then
    echo -e "${YELLOW}⚠ 本地数据目录为空${NC}"
    echo " 无需迁移"
    exit 0
fi

echo "本地文件数: $FILE_COUNT"
echo

# Dry-run preview first.
# NOTE(review): confirm the script path — the TS migrator is referenced
# elsewhere in this repo as scripts/migrate-local-to-minio.ts.
echo -e "${YELLOW}▶ 干运行预览 (Dry Run)...${NC}"
MIGRATE_DRY_RUN=true npx tsx scripts/migrate-to-minio.ts
echo

# Ask for confirmation before the real migration.
read -p "是否开始实际迁移? [y/N] " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo -e "${YELLOW}已取消迁移${NC}"
    exit 0
fi

echo
echo -e "${GREEN}▶ 开始迁移...${NC}"

# Run the migrator inside the `if` condition so a failure does not trip
# `set -e`, making the failure branch below actually reachable.
if npx tsx scripts/migrate-to-minio.ts; then
    echo
    echo -e "${GREEN}══════════════════════════════════════════════════════════${NC}"
    echo -e "${GREEN} 迁移成功完成!${NC}"
    echo -e "${GREEN}══════════════════════════════════════════════════════════${NC}"
    echo
    echo "后续步骤:"
    echo " 1. 验证 MinIO 控制台: http://127.0.0.1:19001"
    echo " 账号: minioadmin / minioadmin"
    echo " 2. 更新 .env: STORAGE_TYPE=minio"
    echo " 3. 重启应用: docker compose restart app"
    echo " 4. 测试图片/视频访问"
    echo " 5. 确认无误后删除本地数据: rm -rf ./data/uploads"
    echo
else
    echo
    echo -e "${RED}══════════════════════════════════════════════════════════${NC}"
    echo -e "${RED} 迁移失败${NC}"
    echo -e "${RED}══════════════════════════════════════════════════════════${NC}"
    echo
    echo "可重新运行继续迁移:"
    echo " ./scripts/migrate-to-minio.sh"
    exit 1
fi
+343
View File
@@ -0,0 +1,343 @@
#!/usr/bin/env node
/**
* 存储迁移脚本: Local → MinIO
*
* 用途: 将本地文件存储的数据无缝迁移到 MinIO 对象存储
* 特点:
* - 断点续传(记录已迁移文件)
* - 校验和验证
* - 原子性操作(失败可回滚)
* - 并行上传加速
*/
import { Client as MinioClient } from 'minio'
import * as fs from 'fs/promises'
import * as path from 'path'
import { createHash } from 'crypto'
import { createReadStream } from 'fs'
// ==================== 配置 ====================
// Central configuration — everything overridable via environment variables.
const CONFIG = {
  // Source: local file storage
  local: {
    baseDir: process.env.LOCAL_UPLOAD_DIR || './data/uploads',
  },
  // Target: MinIO
  minio: {
    endPoint: process.env.MINIO_ENDPOINT?.replace(/^https?:\/\//, '') || '127.0.0.1',
    port: parseInt(process.env.MINIO_PORT || '9000'),
    useSSL: process.env.MINIO_USE_SSL === 'true',
    accessKey: process.env.MINIO_ACCESS_KEY || 'minioadmin',
    secretKey: process.env.MINIO_SECRET_KEY || 'minioadmin',
    bucket: process.env.MINIO_BUCKET || 'waoowaoo',
    region: process.env.MINIO_REGION || 'us-east-1',
    forcePathStyle: process.env.MINIO_FORCE_PATH_STYLE !== 'false',
  },
  // Migration options
  options: {
    concurrency: parseInt(process.env.MIGRATE_CONCURRENCY || '5'),
    dryRun: process.env.MIGRATE_DRY_RUN === 'true',
    resume: process.env.MIGRATE_RESUME !== 'false',
    progressFile: process.env.MIGRATE_PROGRESS_FILE || './scripts/.migrate-progress.json',
    logLevel: process.env.MIGRATE_LOG_LEVEL || 'info', // debug, info, warn, error
  }
}
// ==================== Logging ====================
// Numeric severity ranks used by log() for level filtering.
const LOG_LEVELS = { debug: 0, info: 1, warn: 2, error: 3 }
/** Timestamped, level-filtered logger; 'error' goes to stderr. */
function log(level: string, message: string, ...args: unknown[]) {
  const rank = LOG_LEVELS[level as keyof typeof LOG_LEVELS]
  const threshold = LOG_LEVELS[CONFIG.options.logLevel as keyof typeof LOG_LEVELS]
  if (rank < threshold) return
  const line = `[${new Date().toISOString()}] [${level.toUpperCase()}] ${message}`
  if (level === 'error') {
    console.error(line, ...args)
  } else {
    console.log(line, ...args)
  }
}
// ==================== MinIO 客户端 ====================
// Shared MinIO client built from CONFIG; used by all bucket/object ops.
const minioClient = new MinioClient({
  endPoint: CONFIG.minio.endPoint,
  port: CONFIG.minio.port,
  useSSL: CONFIG.minio.useSSL,
  accessKey: CONFIG.minio.accessKey,
  secretKey: CONFIG.minio.secretKey,
  region: CONFIG.minio.region,
})
// ==================== 文件扫描 ====================
/**
 * Recursively walk `dir`, returning one entry per file with its future
 * object key (posix-style path relative to the scan root), size and mtime.
 * Unreadable directories are logged at 'warn' and skipped, not fatal.
 */
async function scanLocalFiles(dir: string, basePath = ''): Promise<Array<{localPath: string, key: string, size: number, mtime: Date}>> {
  const files: Array<{localPath: string, key: string, size: number, mtime: Date}> = []
  try {
    const entries = await fs.readdir(dir, { withFileTypes: true })
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name)
      const relativePath = path.join(basePath, entry.name)
      if (entry.isDirectory()) {
        const subFiles = await scanLocalFiles(fullPath, relativePath)
        files.push(...subFiles)
      } else {
        const stats = await fs.stat(fullPath)
        files.push({
          localPath: fullPath,
          key: relativePath.replace(/\\/g, '/'), // always use forward slashes
          size: stats.size,
          mtime: stats.mtime,
        })
      }
    }
  } catch (err: unknown) {
    log('warn', `无法读取目录: ${dir}`, (err as Error).message)
  }
  return files
}
// ==================== 校验和 ====================
/** Stream a file through MD5 and resolve with its hex digest. */
async function calculateHash(filePath: string): Promise<string> {
  const digest = createHash('md5')
  const input = createReadStream(filePath)
  return new Promise((resolve, reject) => {
    input.on('error', reject)
    input.on('data', (chunk) => digest.update(chunk))
    input.on('end', () => resolve(digest.digest('hex')))
  })
}
// ==================== 进度管理 ====================
/**
 * Load the set of already-migrated object keys from the progress file.
 * Returns an empty set when resume is disabled, or when the file is
 * missing or unreadable (treated as a fresh start).
 */
async function loadProgress(): Promise<Set<string>> {
  try {
    if (!CONFIG.options.resume) {
      return new Set()
    }
    const data = await fs.readFile(CONFIG.options.progressFile, 'utf-8')
    const progress = JSON.parse(data)
    return new Set(progress.migrated || [])
  } catch {
    return new Set()
  }
}
/** Persist the migrated-key set so an interrupted run can resume. */
async function saveProgress(migratedKeys: Set<string>) {
  const progress = {
    updatedAt: new Date().toISOString(),
    migrated: Array.from(migratedKeys),
  }
  await fs.writeFile(CONFIG.options.progressFile, JSON.stringify(progress, null, 2))
}
// ==================== 存储桶检查/创建 ====================
/**
 * Ensure the target bucket exists. A newly created bucket is given an
 * anonymous public-read policy (s3:GetObject for everyone).
 * NOTE(review): public read may be undesirable in some deployments — confirm.
 */
async function ensureBucket() {
  log('info', `检查存储桶: ${CONFIG.minio.bucket}`)
  const exists = await minioClient.bucketExists(CONFIG.minio.bucket)
  if (!exists) {
    log('info', `创建存储桶: ${CONFIG.minio.bucket}`)
    await minioClient.makeBucket(CONFIG.minio.bucket, CONFIG.minio.region)
    // Grant anonymous read so migrated assets stay URL-addressable.
    const policy = {
      Version: '2012-10-17',
      Statement: [
        {
          Effect: 'Allow',
          Principal: { AWS: ['*'] },
          Action: ['s3:GetObject'],
          Resource: [`arn:aws:s3:::${CONFIG.minio.bucket}/*`]
        }
      ]
    }
    await minioClient.setBucketPolicy(CONFIG.minio.bucket, JSON.stringify(policy))
    log('info', '存储桶访问策略已设置为公开读取')
  }
}
// ==================== 文件上传 ====================
/**
 * Upload one local file to MinIO, skipping keys recorded as migrated,
 * honoring dry-run mode, and verifying the stored object size.
 *
 * Fix over the original: the post-upload statObject result was fetched but
 * never checked, so the "verification" step verified nothing. The remote
 * size is now compared with the local size; a mismatch is reported as an
 * 'error' result (caught by the surrounding try/catch).
 */
async function uploadFile(fileInfo: {localPath: string, key: string, size: number}, migratedKeys: Set<string>): Promise<{status: string, key: string, size?: number, error?: string}> {
  const { localPath, key, size } = fileInfo
  // Resume support: skip keys recorded in the progress file.
  if (migratedKeys.has(key)) {
    log('debug', `跳过已迁移: ${key}`)
    return { status: 'skipped', key }
  }
  if (CONFIG.options.dryRun) {
    log('info', `[DRY RUN] 将上传: ${key} (${formatBytes(size)})`)
    return { status: 'dry_run', key }
  }
  try {
    // Record the local MD5 as object metadata for later auditing.
    const localHash = await calculateHash(localPath)
    const fileStream = createReadStream(localPath)
    await minioClient.putObject(CONFIG.minio.bucket, key, fileStream, size, {
      'Content-Type': guessContentType(key),
      'X-Amz-Meta-Original-Hash': localHash,
    })
    // Verify the upload actually landed with the expected size.
    const stat = await minioClient.statObject(CONFIG.minio.bucket, key)
    if (stat.size !== size) {
      throw new Error(`size mismatch after upload: local=${size} remote=${stat.size}`)
    }
    migratedKeys.add(key)
    log('info', `✓ 上传成功: ${key} (${formatBytes(size)})`)
    return { status: 'success', key, size }
  } catch (err: unknown) {
    log('error', `✗ 上传失败: ${key}`, (err as Error).message)
    return { status: 'error', key, error: (err as Error).message }
  }
}
// ==================== 内容类型猜测 ====================
/**
 * Map a filename extension to its MIME type; anything unrecognized
 * falls back to application/octet-stream.
 */
function guessContentType(filename: string): string {
  const byExtension: Record<string, string> = {
    '.jpg': 'image/jpeg',
    '.jpeg': 'image/jpeg',
    '.png': 'image/png',
    '.gif': 'image/gif',
    '.webp': 'image/webp',
    '.mp4': 'video/mp4',
    '.webm': 'video/webm',
    '.mp3': 'audio/mpeg',
    '.wav': 'audio/wav',
    '.json': 'application/json',
    '.txt': 'text/plain',
  }
  const extension = path.extname(filename).toLowerCase()
  return byExtension[extension] ?? 'application/octet-stream'
}
// ==================== 字节格式化 ====================
/** Human-readable byte count (B through TB, base 1024, at most 2 decimals). */
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B'
  const units = ['B', 'KB', 'MB', 'GB', 'TB']
  const exponent = Math.floor(Math.log(bytes) / Math.log(1024))
  const scaled = parseFloat((bytes / 1024 ** exponent).toFixed(2))
  return `${scaled} ${units[exponent]}`
}
// ==================== 并行任务控制 ====================
async function runWithConcurrency<T>(tasks: Array<() => Promise<T>>, concurrency: number): Promise<T[]> {
const results: T[] = []
const executing: Promise<void>[] = []
for (const task of tasks) {
const promise = task().then(result => {
results.push(result)
})
executing.push(promise)
if (executing.length >= concurrency) {
await Promise.race(executing)
executing.splice(executing.findIndex(p => p === promise), 1)
}
}
await Promise.all(executing)
return results
}
// ==================== 主流程 ====================
/**
 * Entry point: scan local files, load resume progress, ensure the bucket,
 * upload with bounded concurrency, persist progress, and print a summary.
 * Progress is saved every 10 files so an interrupted run resumes where it
 * stopped; exits non-zero when any upload failed.
 */
async function main() {
  console.log('╔══════════════════════════════════════════════════════════╗')
  console.log('║ Local Storage → MinIO Migration Tool ║')
  console.log('╚══════════════════════════════════════════════════════════╝')
  console.log()
  log('info', '配置信息:')
  log('info', ` 本地目录: ${path.resolve(CONFIG.local.baseDir)}`)
  log('info', ` MinIO: ${CONFIG.minio.endPoint}:${CONFIG.minio.port}/${CONFIG.minio.bucket}`)
  log('info', ` 并发数: ${CONFIG.options.concurrency}`)
  log('info', ` 干运行: ${CONFIG.options.dryRun}`)
  log('info', ` 断点续传: ${CONFIG.options.resume}`)
  console.log()
  // 1. Scan local files
  log('info', '扫描本地文件...')
  const files = await scanLocalFiles(CONFIG.local.baseDir)
  log('info', `找到 ${files.length} 个文件`)
  if (files.length === 0) {
    log('info', '没有需要迁移的文件')
    return
  }
  const totalSize = files.reduce((sum, f) => sum + f.size, 0)
  log('info', `总大小: ${formatBytes(totalSize)}`)
  console.log()
  // 2. Load resume progress
  const migratedKeys = await loadProgress()
  log('info', `已迁移: ${migratedKeys.size} 个文件`)
  // 3. Ensure the bucket exists
  await ensureBucket()
  // 4. Run the migration
  const startTime = Date.now()
  let processed = 0
  let success = 0
  let failed = 0
  let skipped = 0
  const uploadTasks = files.map(file => async () => {
    const result = await uploadFile(file, migratedKeys)
    processed++
    // NOTE(review): 'dry_run' results fall through all three branches and
    // are not counted in any bucket — confirm that is intended.
    if (result.status === 'success') success++
    else if (result.status === 'error') failed++
    else if (result.status === 'skipped') skipped++
    // Persist progress every 10 files so interruptions lose little work.
    if (processed % 10 === 0) {
      await saveProgress(migratedKeys)
      const progress = ((processed / files.length) * 100).toFixed(1)
      log('info', `进度: ${progress}% (${processed}/${files.length})`)
    }
    return result
  })
  await runWithConcurrency(uploadTasks, CONFIG.options.concurrency)
  // 5. Save the final progress
  await saveProgress(migratedKeys)
  // 6. Report
  const duration = ((Date.now() - startTime) / 1000).toFixed(1)
  console.log()
  console.log('╔══════════════════════════════════════════════════════════╗')
  console.log('║ 迁移完成 ║')
  console.log('╠══════════════════════════════════════════════════════════╣')
  console.log(`║ 总文件数: ${String(files.length).padEnd(39)}`)
  console.log(`║ 成功: ${String(success).padEnd(39)}`)
  console.log(`║ 失败: ${String(failed).padEnd(39)}`)
  console.log(`║ 跳过: ${String(skipped).padEnd(39)}`)
  console.log(`║ 耗时: ${String(duration + 's').padEnd(39)}`)
  console.log('╚══════════════════════════════════════════════════════════╝')
  // 7. Follow-up instructions
  console.log()
  console.log('📋 后续步骤:')
  console.log('  1. 验证 MinIO 中的文件: mc ls local/waoowaoo')
  console.log('  2. 更新 .env: STORAGE_TYPE=minio')
  console.log('  3. 重启应用: docker compose restart app')
  console.log('  4. 测试图片/视频访问是否正常')
  console.log('  5. 确认无误后可删除本地文件: rm -rf ./data/uploads')
  if (failed > 0) {
    process.exit(1)
  }
}
// 运行
// Entry point: any unhandled error aborts with a non-zero exit code.
main().catch(err => {
  log('error', '迁移失败:', err)
  process.exit(1)
})
@@ -0,0 +1,310 @@
import { prisma } from '@/lib/prisma'
import {
parseModelKeyStrict,
type CapabilitySelections,
type CapabilityValue,
} from '@/lib/model-config-contract'
import { findBuiltinCapabilities } from '@/lib/model-capabilities/catalog'
// Without --apply the script only reports what it would change.
const APPLY = process.argv.includes('--apply')
// Image-model fields read from UserPreference rows.
const USER_IMAGE_MODEL_FIELDS = [
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
] as const
// Image-model fields read from NovelPromotionProject rows (same set today,
// kept separate so the two schemas can diverge independently).
const PROJECT_IMAGE_MODEL_FIELDS = [
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
] as const
type UserImageModelField = typeof USER_IMAGE_MODEL_FIELDS[number]
type ProjectImageModelField = typeof PROJECT_IMAGE_MODEL_FIELDS[number]
// Raw row shape as selected from Prisma (JSON columns still serialized).
interface UserPreferenceRow {
  id: string
  userId: string
  imageResolution: string
  capabilityDefaults: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
}
// Raw project row; videoModel/videoResolution drive the video migration.
interface ProjectRow {
  id: string
  projectId: string
  imageResolution: string
  videoResolution: string
  capabilityOverrides: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
}
// Counters printed as the final JSON run report.
interface MigrationSummary {
  mode: 'dry-run' | 'apply'
  userPreference: {
    scanned: number
    updated: number
    migratedImageResolution: number
  }
  novelPromotionProject: {
    scanned: number
    updated: number
    migratedImageResolution: number
    migratedVideoResolution: number
  }
}
// Narrow to a plain (non-null, non-array) object.
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && !Array.isArray(value)
}
// Capability values are JSON scalars: string, number, or boolean.
function isCapabilityValue(value: unknown): value is CapabilityValue {
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'boolean':
      return true
    default:
      return false
  }
}
/**
 * Coerce an arbitrary parsed JSON value into CapabilitySelections: keep
 * only object-valued entries, and within each, only scalar capability
 * values. Anything non-conforming yields {}.
 */
function normalizeSelections(raw: unknown): CapabilitySelections {
  if (!isRecord(raw)) return {}
  const result: CapabilitySelections = {}
  Object.entries(raw).forEach(([modelKey, rawSelection]) => {
    if (!isRecord(rawSelection)) return
    const selection: Record<string, CapabilityValue> = {}
    Object.entries(rawSelection).forEach(([field, value]) => {
      if (isCapabilityValue(value)) selection[field] = value
    })
    result[modelKey] = selection
  })
  return result
}
/** Parse a serialized selections column; any malformed value becomes {}. */
function parseSelections(raw: string | null): CapabilitySelections {
  if (!raw) return {}
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return {}
  }
  return normalizeSelections(parsed)
}
/** Serialize selections; an empty map is stored as NULL rather than '{}'. */
function serializeSelections(selections: CapabilitySelections): string | null {
  const isEmpty = Object.keys(selections).length === 0
  return isEmpty ? null : JSON.stringify(selections)
}
/**
 * Look up the builtin capability catalog for a model key and return its
 * resolutionOptions list (non-blank strings only). Returns [] for
 * unparseable keys or models without a catalog entry.
 */
function getCapabilityResolutionOptions(
  modelType: 'image' | 'video',
  modelKey: string,
): string[] {
  const parsed = parseModelKeyStrict(modelKey)
  if (!parsed) return []
  const capabilities = findBuiltinCapabilities(modelType, parsed.provider, parsed.modelId)
  const namespace = capabilities?.[modelType]
  if (!namespace || !isRecord(namespace)) return []
  const resolutionOptions = namespace.resolutionOptions
  if (!Array.isArray(resolutionOptions)) return []
  return resolutionOptions.filter((item): item is string => typeof item === 'string' && item.trim().length > 0)
}
/**
 * Copy a legacy flat resolution setting into the per-model capability
 * selections, mutating input.selections in place. Returns true when a
 * selection was written. Skips models whose catalog does not list the
 * resolution, and never overwrites an existing explicit selection.
 */
function ensureModelResolutionSelection(input: {
  modelType: 'image' | 'video'
  modelKey: string
  resolution: string
  selections: CapabilitySelections
}): boolean {
  const options = getCapabilityResolutionOptions(input.modelType, input.modelKey)
  if (options.length === 0) return false
  if (!options.includes(input.resolution)) return false
  const current = input.selections[input.modelKey]
  // Respect an existing explicit choice.
  if (current && current.resolution !== undefined) {
    return false
  }
  // Mutates the caller-provided selections object.
  input.selections[input.modelKey] = {
    ...(current || {}),
    resolution: input.resolution,
  }
  return true
}
function collectModelKeys<RowType>(
row: RowType,
fields: readonly (keyof RowType)[],
): string[] {
const modelKeys: string[] = []
for (const field of fields) {
const value = row[field]
if (typeof value !== 'string') continue
const trimmed = value.trim()
if (!trimmed) continue
modelKeys.push(trimmed)
}
return modelKeys
}
/**
 * Backfill per-model resolution selections for every UserPreference row
 * from the legacy flat imageResolution column. Writes capabilityDefaults
 * back only with --apply; counters are accumulated into `summary`.
 */
async function migrateUserPreferences(summary: MigrationSummary) {
  const rows = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      imageResolution: true,
      capabilityDefaults: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
    },
  }) as UserPreferenceRow[]
  summary.userPreference.scanned = rows.length
  for (const row of rows) {
    const nextSelections = parseSelections(row.capabilityDefaults)
    const modelKeys = collectModelKeys<UserPreferenceRow>(row, USER_IMAGE_MODEL_FIELDS)
    let changed = false
    for (const modelKey of modelKeys) {
      if (ensureModelResolutionSelection({
        modelType: 'image',
        modelKey,
        resolution: row.imageResolution,
        selections: nextSelections,
      })) {
        changed = true
        summary.userPreference.migratedImageResolution += 1
      }
    }
    if (!changed) continue
    summary.userPreference.updated += 1
    if (APPLY) {
      await prisma.userPreference.update({
        where: { id: row.id },
        data: {
          capabilityDefaults: serializeSelections(nextSelections),
        },
      })
    }
  }
}
/**
 * Backfill per-model resolution selections for every NovelPromotionProject
 * row: image models from the legacy imageResolution column, the single
 * video model from videoResolution. Writes capabilityOverrides back only
 * with --apply; counters are accumulated into `summary`.
 */
async function migrateProjects(summary: MigrationSummary) {
  const rows = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      projectId: true,
      imageResolution: true,
      videoResolution: true,
      capabilityOverrides: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  }) as ProjectRow[]
  summary.novelPromotionProject.scanned = rows.length
  for (const row of rows) {
    const nextSelections = parseSelections(row.capabilityOverrides)
    const imageModelKeys = collectModelKeys<ProjectRow>(row, PROJECT_IMAGE_MODEL_FIELDS)
    let changed = false
    for (const modelKey of imageModelKeys) {
      if (ensureModelResolutionSelection({
        modelType: 'image',
        modelKey,
        resolution: row.imageResolution,
        selections: nextSelections,
      })) {
        changed = true
        summary.novelPromotionProject.migratedImageResolution += 1
      }
    }
    // The video model is migrated from the separate video-resolution column.
    if (typeof row.videoModel === 'string' && row.videoModel.trim()) {
      if (ensureModelResolutionSelection({
        modelType: 'video',
        modelKey: row.videoModel.trim(),
        resolution: row.videoResolution,
        selections: nextSelections,
      })) {
        changed = true
        summary.novelPromotionProject.migratedVideoResolution += 1
      }
    }
    if (!changed) continue
    summary.novelPromotionProject.updated += 1
    if (APPLY) {
      await prisma.novelPromotionProject.update({
        where: { id: row.id },
        data: {
          capabilityOverrides: serializeSelections(nextSelections),
        },
      })
    }
  }
}
/** Entry point: run both backfills and print the JSON summary. */
async function main() {
  const summary: MigrationSummary = {
    mode: APPLY ? 'apply' : 'dry-run',
    userPreference: {
      scanned: 0,
      updated: 0,
      migratedImageResolution: 0,
    },
    novelPromotionProject: {
      scanned: 0,
      updated: 0,
      migratedImageResolution: 0,
      migratedVideoResolution: 0,
    },
  }
  await migrateUserPreferences(summary)
  await migrateProjects(summary)
  process.stdout.write(`${JSON.stringify(summary, null, 2)}\n`)
}
// Entry point wiring: map a missing-column failure to an actionable hint
// (the schema migration must be applied first); always disconnect Prisma.
main()
  .catch((error: unknown) => {
    const message = error instanceof Error ? error.message : String(error)
    const missingColumn =
      message.includes('capabilityDefaults') || message.includes('capabilityOverrides')
    if (missingColumn && message.includes('does not exist')) {
      process.stderr.write(
        '[migrate-capability-selections] FAILED: required DB columns are missing. ' +
        'Apply SQL migration `prisma/migrations/20260215_add_capability_selection_columns.sql` first.\n',
      )
    } else {
      process.stderr.write(`[migrate-capability-selections] FAILED: ${message}\n`)
    }
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
@@ -0,0 +1,152 @@
import { prisma } from '@/lib/prisma'
// Without --apply the script only reports what it would change.
const APPLY = process.argv.includes('--apply')
// Row shape selected from Prisma (customModels is a serialized JSON array).
type PreferenceRow = {
  id: string
  userId: string
  customModels: string | null
}
// Aggregate counters reported as JSON at the end of the run.
type MigrationSummary = {
  mode: 'dry-run' | 'apply'
  scanned: number
  updatedRows: number
  migratedModels: number
  skippedInvalidRows: number
}
// Narrow to a plain (non-null, non-array) object.
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && !Array.isArray(value)
}
/**
 * Parse the customModels JSON column. Returns [] for empty/null, the
 * parsed array when valid, and null when the stored value is not valid
 * JSON or not an array (the caller skips such rows).
 */
function parseCustomModels(raw: string | null): unknown[] | null {
  if (!raw) return []
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return null
  }
  return Array.isArray(parsed) ? parsed : null
}
/**
 * Migrate a legacy flat pricing object ({ input, output } per-million
 * rates) into the namespaced shape ({ llm: { inputPerMillion,
 * outputPerMillion } }). Existing valid llm values win over legacy ones;
 * the legacy keys are dropped and all other keys are preserved.
 * `changed` is false when no valid legacy key was present.
 */
function migrateLegacyCustomPricing(raw: unknown): {
  changed: boolean
  next: unknown
} {
  if (!isRecord(raw)) {
    return { changed: false, next: raw }
  }
  // A usable rate is a finite, non-negative number.
  const asMoney = (value: unknown): number | undefined =>
    typeof value === 'number' && Number.isFinite(value) && value >= 0 ? value : undefined
  const legacyInput = asMoney(raw.input)
  const legacyOutput = asMoney(raw.output)
  if (legacyInput === undefined && legacyOutput === undefined) {
    return { changed: false, next: raw }
  }
  const llmRaw = isRecord(raw.llm) ? raw.llm : {}
  // Prefer an already-migrated llm value; fall back to the legacy one.
  const inputPerMillion = asMoney(llmRaw.inputPerMillion) ?? legacyInput
  const outputPerMillion = asMoney(llmRaw.outputPerMillion) ?? legacyOutput
  const nextPricing: Record<string, unknown> = {}
  for (const [key, value] of Object.entries(raw)) {
    if (key !== 'input' && key !== 'output') nextPricing[key] = value
  }
  const llm: Record<string, unknown> = {}
  if (inputPerMillion !== undefined) llm.inputPerMillion = inputPerMillion
  if (outputPerMillion !== undefined) llm.outputPerMillion = outputPerMillion
  nextPricing.llm = llm
  return {
    changed: true,
    next: nextPricing,
  }
}
function migrateCustomModel(rawModel: unknown): { changed: boolean; next: unknown } {
if (!isRecord(rawModel)) {
return { changed: false, next: rawModel }
}
const migratedPricing = migrateLegacyCustomPricing(rawModel.customPricing)
if (!migratedPricing.changed) {
return { changed: false, next: rawModel }
}
return {
changed: true,
next: {
...rawModel,
customPricing: migratedPricing.next,
},
}
}
// Scan every userPreference row, migrate legacy custom-pricing entries in the
// JSON customModels column, and (with --apply) persist rewritten rows.
async function main() {
  const summary: MigrationSummary = {
    mode: APPLY ? 'apply' : 'dry-run',
    scanned: 0,
    updatedRows: 0,
    migratedModels: 0,
    skippedInvalidRows: 0,
  }
  const rows = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      customModels: true,
    },
  }) as PreferenceRow[]
  summary.scanned = rows.length
  for (const row of rows) {
    const models = parseCustomModels(row.customModels)
    if (models === null) {
      // Unreadable JSON: count it and leave the row untouched.
      summary.skippedInvalidRows += 1
      continue
    }
    const nextModels: unknown[] = []
    let migratedInRow = 0
    for (const model of models) {
      const result = migrateCustomModel(model)
      if (result.changed) migratedInRow += 1
      nextModels.push(result.next)
    }
    summary.migratedModels += migratedInRow
    if (migratedInRow === 0) continue
    summary.updatedRows += 1
    if (APPLY) {
      await prisma.userPreference.update({
        where: { id: row.id },
        data: {
          customModels: JSON.stringify(nextModels),
        },
      })
    }
  }
  console.log(JSON.stringify(summary, null, 2))
}
// Runner: disconnect Prisma on success; log, disconnect, and exit(1) on
// failure.
main()
  .then(() => prisma.$disconnect())
  .catch(async (error: unknown) => {
    console.error('[migrate-custom-pricing-v2] failed', error)
    await prisma.$disconnect()
    process.exit(1)
  })
@@ -0,0 +1,80 @@
import { prisma } from '@/lib/prisma'
import { migrateGatewayRoutePayload } from '@/lib/migrations/gateway-route-openai-compat'
// --apply writes changes back to the DB; the default is a read-only dry run.
const APPLY = process.argv.includes('--apply')
// Projection of the userPreference table read by this script.
type PreferenceRow = {
  id: string
  userId: string
  // JSON-encoded provider list; null when unset.
  customProviders: string | null
}
// Counters printed as the run's JSON summary, one per migration category.
type MigrationSummary = {
  mode: 'dry-run' | 'apply'
  scanned: number
  updatedRows: number
  migratedProviders: number
  routeLitellmToOpenaiCompat: number
  routeForcedOfficial: number
  geminiApiModeCorrected: number
  skippedInvalidRows: number
}
// Rewrite each user's customProviders payload through
// migrateGatewayRoutePayload, accumulating per-category counters; writes
// changed rows back only under --apply.
async function main() {
  const summary: MigrationSummary = {
    mode: APPLY ? 'apply' : 'dry-run',
    scanned: 0,
    updatedRows: 0,
    migratedProviders: 0,
    routeLitellmToOpenaiCompat: 0,
    routeForcedOfficial: 0,
    geminiApiModeCorrected: 0,
    skippedInvalidRows: 0,
  }
  const rows = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      customProviders: true,
    },
  }) as PreferenceRow[]
  summary.scanned = rows.length
  for (const row of rows) {
    const result = migrateGatewayRoutePayload(row.customProviders)
    if (result.status === 'invalid') {
      summary.skippedInvalidRows += 1
      continue
    }
    // Category counters accumulate even for rows that end up unchanged.
    const counters = result.summary
    summary.migratedProviders += counters.providersChanged
    summary.routeLitellmToOpenaiCompat += counters.routeLitellmToOpenaiCompat
    summary.routeForcedOfficial += counters.routeForcedOfficial
    summary.geminiApiModeCorrected += counters.geminiApiModeCorrected
    if (!result.changed) continue
    summary.updatedRows += 1
    if (APPLY) {
      await prisma.userPreference.update({
        where: { id: row.id },
        data: {
          customProviders: result.nextRaw ?? null,
        },
      })
    }
  }
  console.log(JSON.stringify(summary, null, 2))
}
// Runner: disconnect Prisma on success; log, disconnect, and exit(1) on
// failure.
main()
  .then(() => prisma.$disconnect())
  .catch(async (error: unknown) => {
    console.error('[migrate-gateway-route-openai-compat] failed', error)
    await prisma.$disconnect()
    process.exit(1)
  })
@@ -0,0 +1,145 @@
import { prisma } from '@/lib/prisma'
// --apply actually runs the ALTER TABLE; the default is a read-only dry run.
const APPLY = process.argv.includes('--apply')
// Name used when creating the index. Detection accepts any existing index
// with the required unique column sequence regardless of its name.
const REQUIRED_INDEX_NAME = 'graph_artifacts_runId_stepKey_artifactType_refId_key'
// Column sequence, in order, that the unique index must cover.
const REQUIRED_COLUMNS = ['runId', 'stepKey', 'artifactType', 'refId'] as const
// One row of `SHOW INDEX FROM graph_artifacts`; numeric fields may arrive
// as strings depending on the driver.
type IndexRow = {
  Key_name: string
  Non_unique: number | string
  Seq_in_index: number | string
  Column_name: string
}
// One duplicate (runId, stepKey, artifactType, refId) group with its count.
type DuplicateRow = {
  runId: string
  stepKey: string
  artifactType: string
  refId: string
  c: bigint | number
}
// JSON summary printed at the end of the run.
type MigrationSummary = {
  mode: 'dry-run' | 'apply'
  hasRequiredIndexBefore: boolean
  duplicateGroupCount: number
  duplicateSamples: Array<{
    runId: string
    stepKey: string
    artifactType: string
    refId: string
    count: number
  }>
  altered: boolean
  hasRequiredIndexAfter: boolean
}
// Coerce a SHOW INDEX numeric-or-string field to a number (NaN when invalid).
function parseIntSafe(value: number | string) {
  if (typeof value === 'number') return value
  return Number.parseInt(value, 10)
}
/**
 * Report whether a UNIQUE index covering exactly
 * (runId, stepKey, artifactType, refId) — in that order — already exists,
 * based on raw SHOW INDEX rows. The index name is irrelevant: any index with
 * the required unique column sequence qualifies.
 */
function hasRequiredUniqueIndex(rows: IndexRow[]) {
  // SHOW INDEX emits one row per column; group them by index name.
  const grouped = new Map<string, Array<{ seq: number; column: string; nonUnique: number }>>()
  for (const row of rows) {
    const seq = parseIntSafe(row.Seq_in_index)
    const nonUnique = parseIntSafe(row.Non_unique)
    if (!Number.isFinite(seq) || !Number.isFinite(nonUnique)) continue
    const items = grouped.get(row.Key_name) || []
    items.push({
      seq,
      column: row.Column_name,
      nonUnique,
    })
    grouped.set(row.Key_name, items)
  }
  for (const entries of grouped.values()) {
    // Must span exactly the four required columns.
    if (entries.length !== REQUIRED_COLUMNS.length) continue
    const sorted = entries.sort((a, b) => a.seq - b.seq)
    // Non_unique === 0 marks a UNIQUE index.
    if (sorted[0]?.nonUnique !== 0) continue
    const isTarget = sorted.every((entry, index) => entry.column === REQUIRED_COLUMNS[index])
    // Fix: the original tested `isTarget && key === REQUIRED_INDEX_NAME`
    // immediately before an unconditional `if (isTarget) return true`,
    // making the name check dead code; collapsed into a single branch.
    if (isTarget) return true
  }
  return false
}
// COUNT(*) from raw MySQL queries may surface as a JS bigint; coerce to number.
function toNumber(value: bigint | number) {
  if (typeof value === 'number') return value
  return Number(value)
}
// Fetch the raw index metadata rows for the graph_artifacts table.
async function loadIndexRows(): Promise<IndexRow[]> {
  const rows = await prisma.$queryRawUnsafe<IndexRow[]>('SHOW INDEX FROM graph_artifacts')
  return rows
}
// Sample up to 20 (runId, stepKey, artifactType, refId) groups that occur
// more than once; any such group blocks creation of the unique index.
// NULL stepKey rows are excluded because the unique constraint does not
// apply to them in MySQL.
async function loadDuplicateGroups() {
  return await prisma.$queryRawUnsafe<DuplicateRow[]>(
    `SELECT runId, stepKey, artifactType, refId, COUNT(*) AS c
FROM graph_artifacts
WHERE stepKey IS NOT NULL
GROUP BY runId, stepKey, artifactType, refId
HAVING c > 1
LIMIT 20`,
  )
}
// Orchestrates the index migration:
//   1. inspect current indexes; exit early if the unique index already exists
//   2. refuse to run while duplicate rows would make the ALTER fail
//   3. under --apply, add the unique index and verify it took effect
async function main() {
  const beforeRows = await loadIndexRows()
  const hasBefore = hasRequiredUniqueIndex(beforeRows)
  const duplicates = await loadDuplicateGroups()
  const summary: MigrationSummary = {
    mode: APPLY ? 'apply' : 'dry-run',
    hasRequiredIndexBefore: hasBefore,
    duplicateGroupCount: duplicates.length,
    duplicateSamples: duplicates.map((row) => ({
      runId: row.runId,
      stepKey: row.stepKey,
      artifactType: row.artifactType,
      refId: row.refId,
      // COUNT(*) may arrive as bigint; normalize for JSON output.
      count: toNumber(row.c),
    })),
    altered: false,
    hasRequiredIndexAfter: hasBefore,
  }
  if (hasBefore) {
    // Nothing to do: the index is already present.
    console.log(JSON.stringify(summary, null, 2))
    return
  }
  if (duplicates.length > 0) {
    // The ALTER would fail on duplicates; surface them instead of applying.
    throw new Error(
      `cannot add unique index; found ${duplicates.length} duplicate groups in graph_artifacts (stepKey IS NOT NULL)`,
    )
  }
  if (APPLY) {
    await prisma.$executeRawUnsafe(
      `ALTER TABLE graph_artifacts
ADD UNIQUE INDEX ${REQUIRED_INDEX_NAME} (runId, stepKey, artifactType, refId)`,
    )
    summary.altered = true
    // Re-read index metadata to confirm the ALTER actually took effect.
    const afterRows = await loadIndexRows()
    summary.hasRequiredIndexAfter = hasRequiredUniqueIndex(afterRows)
    if (!summary.hasRequiredIndexAfter) {
      throw new Error('unique index create verification failed')
    }
  }
  console.log(JSON.stringify(summary, null, 2))
}
// Runner: disconnect Prisma on success; log, disconnect, and exit(1) on
// failure.
main()
  .then(() => prisma.$disconnect())
  .catch(async (error: unknown) => {
    console.error('[migrate-graph-artifacts-unique-index] failed', error)
    await prisma.$disconnect()
    process.exit(1)
  })
@@ -0,0 +1,498 @@
import fs from 'fs'
import path from 'path'
import { prisma } from '@/lib/prisma'
import {
composeModelKey,
parseModelKeyStrict,
validateModelCapabilities,
type ModelCapabilities,
type UnifiedModelType,
} from '@/lib/model-config-contract'
// Model-selection columns shared by userPreference and novelPromotionProject.
type ModelField =
  | 'analysisModel'
  | 'characterModel'
  | 'locationModel'
  | 'storyboardModel'
  | 'editModel'
  | 'videoModel'
// Projection of the userPreference table read by this script.
type PreferenceRow = {
  id: string
  userId: string
  customModels: string | null
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
}
// Projection of novelPromotionProject plus the owning user's id.
type ProjectRow = {
  id: string
  projectId: string
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
  project: {
    userId: string
  }
}
// One problem found during migration; written to the JSON report.
type MigrationIssue = {
  table: 'userPreference' | 'novelPromotionProject'
  rowId: string
  userId?: string
  field: string
  kind:
    | 'CUSTOM_MODELS_JSON_INVALID'
    | 'MODEL_SHAPE_INVALID'
    | 'MODEL_TYPE_INVALID'
    | 'MODEL_KEY_INCOMPLETE'
    | 'MODEL_KEY_MISMATCH'
    | 'MODEL_CAPABILITY_INVALID'
    | 'LEGACY_MODEL_ID_NOT_FOUND'
    | 'LEGACY_MODEL_ID_AMBIGUOUS'
  rawValue?: string | null
  // Populated for AMBIGUOUS issues: every model_key the raw id could map to.
  candidates?: string[]
  message: string
}
// Full report written to disk at the end of the run.
type MigrationReport = {
  generatedAt: string
  mode: 'dry-run' | 'apply'
  userPreference: {
    scanned: number
    updated: number
    updatedCustomModels: number
    updatedDefaultFields: number
  }
  novelPromotionProject: {
    scanned: number
    updated: number
    updatedFields: number
  }
  issues: MigrationIssue[]
}
// Canonical shape of one customModels entry after normalization.
type NormalizedModel = {
  provider: string
  modelId: string
  modelKey: string
  name: string
  type: UnifiedModelType
  price: number
  resolution?: '2K' | '4K'
  capabilities?: ModelCapabilities
}
// --apply writes changes back to the DB; the default is a read-only dry run.
const APPLY = process.argv.includes('--apply')
// Cap on issues recorded in the report to bound its size.
const MAX_ISSUES = 500
const MODEL_FIELDS: readonly ModelField[] = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
]
// Known pre-contract builtin model ids mapped to their canonical
// provider::modelId keys; used when a stored value has no provider prefix.
const LEGACY_MODEL_ID_MAP = new Map<string, string>([
  ['anthropic/claude-sonnet-4.5', 'openrouter::anthropic/claude-sonnet-4.5'],
  ['google/gemini-3-pro-preview', 'openrouter::google/gemini-3-pro-preview'],
  ['openai/gpt-5.2', 'openrouter::openai/gpt-5.2'],
  ['banana', 'fal::banana'],
  ['banana-2k', 'fal::banana'],
  ['seedream', 'ark::doubao-seedream-4-0-250828'],
  ['seedream4.5', 'ark::doubao-seedream-4-5-251128'],
  ['gemini-3-pro-image-preview', 'google::gemini-3-pro-image-preview'],
  ['gemini-3-pro-image-preview-batch', 'google::gemini-3-pro-image-preview-batch'],
  ['nano-banana-pro', 'google::gemini-3-pro-image-preview'],
  ['gemini-3.0-pro-image-portrait', 'flow2api::gemini-3.0-pro-image-portrait'],
  ['imagen-4.0-ultra-generate-001', 'google::imagen-4.0-ultra-generate-001'],
  ['doubao-seedance-1-0-pro-250528', 'ark::doubao-seedance-1-0-pro-250528'],
  ['doubao-seedance-1-0-pro-fast-251015', 'ark::doubao-seedance-1-0-pro-fast-251015'],
  ['doubao-seedance-1-0-pro-fast-251015-batch', 'ark::doubao-seedance-1-0-pro-fast-251015-batch'],
])
// Resolve the report output path from `--report=<path>` (checked first) or
// `--report <path>`; falls back to the default location under
// scripts/migrations/reports.
function parseReportPathArg(): string {
  const flagPrefix = '--report='
  const inlineArg = process.argv.find((arg) => arg.startsWith(flagPrefix))
  if (inlineArg !== undefined) {
    return inlineArg.slice(flagPrefix.length)
  }
  const bareIndex = process.argv.indexOf('--report')
  if (bareIndex !== -1) {
    const next = process.argv[bareIndex + 1]
    if (next) return next
  }
  return 'scripts/migrations/reports/model-config-migration-report.json'
}
// Narrow to a plain object: rejects null, arrays, and all primitives.
function isRecord(value: unknown): value is Record<string, unknown> {
  return typeof value === 'object' && value !== null && !Array.isArray(value)
}
// Trim string inputs; any non-string becomes the empty string.
function toTrimmedString(value: unknown): string {
  if (typeof value !== 'string') return ''
  return value.trim()
}
// Guard for the closed set of unified model categories.
function isUnifiedModelType(value: unknown): value is UnifiedModelType {
  switch (value) {
    case 'llm':
    case 'image':
    case 'video':
    case 'audio':
    case 'lipsync':
      return true
    default:
      return false
  }
}
/**
 * Deterministic JSON serialization used for change detection.
 *
 * Fix: this previously delegated straight to JSON.stringify, which is
 * sensitive to object key insertion order — so re-ordered-but-equal models
 * compared as "changed" and were rewritten needlessly. Keys are now emitted
 * in sorted order while preserving JSON semantics (undefined object values
 * are skipped; undefined array items serialize as null).
 */
function stableStringify(value: unknown): string {
  if (Array.isArray(value)) {
    const items = value.map((item) => (item === undefined ? 'null' : stableStringify(item)))
    return `[${items.join(',')}]`
  }
  if (value && typeof value === 'object') {
    const record = value as Record<string, unknown>
    const parts = Object.keys(record)
      .sort()
      .filter((key) => record[key] !== undefined)
      .map((key) => `${JSON.stringify(key)}:${stableStringify(record[key])}`)
    return `{${parts.join(',')}}`
  }
  return JSON.stringify(value)
}
// Parse the customModels column into an array; ok:false signals corrupt JSON
// or a non-array payload (callers record it as an issue).
function parseCustomModels(raw: string | null): { ok: true; value: unknown[] } | { ok: false } {
  if (!raw) return { ok: true, value: [] }
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return { ok: false }
  }
  if (!Array.isArray(parsed)) return { ok: false }
  return { ok: true, value: parsed }
}
/**
 * Validate and normalize one raw customModels entry.
 *
 * Returns the normalized model (or null when rejected), whether
 * normalization altered the entry, and an issue describing any rejection
 * (table/rowId are filled in by the caller).
 */
function normalizeModel(
  raw: unknown,
): { normalized: NormalizedModel | null; changed: boolean; issue?: Omit<MigrationIssue, 'table' | 'rowId'> } {
  if (!isRecord(raw)) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels',
        kind: 'MODEL_SHAPE_INVALID',
        message: 'customModels item must be object',
      },
    }
  }
  const modelType = raw.type
  if (!isUnifiedModelType(modelType)) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels.type',
        kind: 'MODEL_TYPE_INVALID',
        rawValue: String(raw.type ?? ''),
        message: 'custom model type must be llm/image/video/audio/lipsync',
      },
    }
  }
  // provider/modelId may come from dedicated fields or from a stored
  // modelKey; explicit fields take precedence.
  const providerField = toTrimmedString(raw.provider)
  const modelIdField = toTrimmedString(raw.modelId)
  const parsedModelKey = parseModelKeyStrict(toTrimmedString(raw.modelKey))
  const provider = providerField || parsedModelKey?.provider || ''
  const modelId = modelIdField || parsedModelKey?.modelId || ''
  const modelKey = composeModelKey(provider, modelId)
  if (!modelKey) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels.modelKey',
        kind: 'MODEL_KEY_INCOMPLETE',
        rawValue: toTrimmedString(raw.modelKey),
        message: 'provider/modelId/modelKey cannot compose a valid model_key',
      },
    }
  }
  // A stored modelKey that disagrees with provider/modelId is rejected
  // rather than silently overwritten.
  if (parsedModelKey && parsedModelKey.modelKey !== modelKey) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels.modelKey',
        kind: 'MODEL_KEY_MISMATCH',
        rawValue: toTrimmedString(raw.modelKey),
        message: 'modelKey conflicts with provider/modelId',
      },
    }
  }
  // Only the two known resolutions survive; anything else is dropped.
  const rawResolution = toTrimmedString(raw.resolution)
  const resolution = rawResolution === '2K' || rawResolution === '4K' ? rawResolution : undefined
  const capabilities = isRecord(raw.capabilities)
    ? ({ ...(raw.capabilities as ModelCapabilities) })
    : undefined
  const capabilityIssues = validateModelCapabilities(modelType, capabilities)
  if (capabilityIssues.length > 0) {
    // Only the first capability problem is reported per model.
    const firstIssue = capabilityIssues[0]
    return {
      normalized: null,
      changed: false,
      issue: {
        field: firstIssue.field,
        kind: 'MODEL_CAPABILITY_INVALID',
        message: `${firstIssue.code}: ${firstIssue.message}`,
      },
    }
  }
  const name = toTrimmedString(raw.name) || modelId
  const price = typeof raw.price === 'number' && Number.isFinite(raw.price) ? raw.price : 0
  const normalized: NormalizedModel = {
    provider,
    modelId,
    modelKey,
    name,
    type: modelType,
    price,
    ...(resolution ? { resolution } : {}),
    ...(capabilities ? { capabilities } : {}),
  }
  // `changed` drives whether the row gets rewritten in the DB.
  const changed = stableStringify(raw) !== stableStringify(normalized)
  return { normalized, changed }
}
// Append an issue to the report, silently dropping anything beyond the
// MAX_ISSUES cap so a pathological dataset cannot balloon the report file.
function addIssue(report: MigrationReport, issue: MigrationIssue) {
  if (report.issues.length < MAX_ISSUES) {
    report.issues.push(issue)
  }
}
/**
 * Normalize one stored model-field value to a canonical model_key.
 *
 * Resolution order:
 *  1. blank / whitespace-only -> null (field cleared)
 *  2. already a strict model_key -> its canonical form
 *  3. unique match in the user's custom-model mapping -> that model_key
 *  4. known legacy builtin id -> mapped model_key
 *  5. otherwise kept unchanged, with a NOT_FOUND (no candidates) or
 *     AMBIGUOUS (multiple candidates) issue for the report.
 */
function normalizeModelFieldValue(
  rawValue: string | null,
  field: ModelField,
  mappingByModelId: Map<string, string[]>,
): { nextValue: string | null; changed: boolean; issue?: Omit<MigrationIssue, 'table' | 'rowId'> } {
  if (!rawValue || !rawValue.trim()) {
    // `changed` is true only when a non-null (whitespace) string is cleared.
    return { nextValue: null, changed: rawValue !== null }
  }
  const trimmed = rawValue.trim()
  const parsed = parseModelKeyStrict(trimmed)
  if (parsed) {
    return { nextValue: parsed.modelKey, changed: parsed.modelKey !== rawValue }
  }
  const candidates = mappingByModelId.get(trimmed) || []
  if (candidates.length === 1) {
    return { nextValue: candidates[0], changed: candidates[0] !== rawValue }
  }
  if (candidates.length === 0) {
    // Consistency fix: the original tested `candidates.length === 0` in two
    // separate consecutive ifs; collapsed into one branch (same behavior).
    const mappedModelKey = LEGACY_MODEL_ID_MAP.get(trimmed)
    if (mappedModelKey) {
      return { nextValue: mappedModelKey, changed: mappedModelKey !== rawValue }
    }
    return {
      nextValue: rawValue,
      changed: false,
      issue: {
        field,
        kind: 'LEGACY_MODEL_ID_NOT_FOUND',
        rawValue,
        message: `${field} legacy modelId cannot be mapped`,
      },
    }
  }
  return {
    nextValue: rawValue,
    changed: false,
    issue: {
      field,
      kind: 'LEGACY_MODEL_ID_AMBIGUOUS',
      rawValue,
      candidates,
      message: `${field} legacy modelId maps to multiple providers`,
    },
  }
}
// Two-pass migration:
//   Pass 1 — userPreference: normalize customModels JSON and default model
//     fields, building a per-user modelId -> model_key[] mapping as it goes.
//   Pass 2 — novelPromotionProject: normalize the same fields using the
//     owning user's mapping from pass 1 (so pass order matters).
// A JSON report (counters + capped issue list) is always written to disk;
// DB writes happen only under --apply.
async function main() {
  const reportPath = parseReportPathArg()
  const report: MigrationReport = {
    generatedAt: new Date().toISOString(),
    mode: APPLY ? 'apply' : 'dry-run',
    userPreference: {
      scanned: 0,
      updated: 0,
      updatedCustomModels: 0,
      updatedDefaultFields: 0,
    },
    novelPromotionProject: {
      scanned: 0,
      updated: 0,
      updatedFields: 0,
    },
    issues: [],
  }
  const userPrefs = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      customModels: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  })
  // userId -> (modelId -> candidate model_keys), reused in the project pass.
  const userMappings = new Map<string, Map<string, string[]>>()
  for (const pref of userPrefs) {
    report.userPreference.scanned += 1
    const updateData: Partial<Record<ModelField | 'customModels', string | null>> = {}
    const parsedCustomModels = parseCustomModels(pref.customModels)
    const normalizedModels: NormalizedModel[] = []
    let customModelsChanged = false
    if (!parsedCustomModels.ok) {
      // Corrupt JSON: record the issue; the row's models are treated as empty.
      addIssue(report, {
        table: 'userPreference',
        rowId: pref.id,
        userId: pref.userId,
        field: 'customModels',
        kind: 'CUSTOM_MODELS_JSON_INVALID',
        rawValue: pref.customModels,
        message: 'customModels JSON is invalid',
      })
    } else {
      for (let index = 0; index < parsedCustomModels.value.length; index += 1) {
        const normalizedResult = normalizeModel(parsedCustomModels.value[index])
        if (normalizedResult.issue) {
          // Rejected entries are dropped from the normalized list.
          addIssue(report, {
            table: 'userPreference',
            rowId: pref.id,
            userId: pref.userId,
            ...normalizedResult.issue,
          })
          continue
        }
        if (normalizedResult.normalized) {
          normalizedModels.push(normalizedResult.normalized)
          if (normalizedResult.changed) customModelsChanged = true
        }
      }
    }
    // Build the per-user legacy-id lookup (deduplicated candidate keys).
    const mappingByModelId = new Map<string, string[]>()
    for (const model of normalizedModels) {
      const existing = mappingByModelId.get(model.modelId) || []
      if (!existing.includes(model.modelKey)) existing.push(model.modelKey)
      mappingByModelId.set(model.modelId, existing)
    }
    userMappings.set(pref.userId, mappingByModelId)
    if (customModelsChanged) {
      updateData.customModels = JSON.stringify(normalizedModels)
      report.userPreference.updatedCustomModels += 1
    }
    for (const field of MODEL_FIELDS) {
      const normalizedField = normalizeModelFieldValue(pref[field], field, mappingByModelId)
      if (normalizedField.issue) {
        addIssue(report, {
          table: 'userPreference',
          rowId: pref.id,
          userId: pref.userId,
          ...normalizedField.issue,
        })
      }
      if (normalizedField.changed) {
        updateData[field] = normalizedField.nextValue
        report.userPreference.updatedDefaultFields += 1
      }
    }
    if (Object.keys(updateData).length > 0) {
      report.userPreference.updated += 1
      if (APPLY) {
        await prisma.userPreference.update({
          where: { id: pref.id },
          data: updateData,
        })
      }
    }
  }
  const projects = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      projectId: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
      project: {
        select: {
          userId: true,
        },
      },
    },
  })
  for (const row of projects as ProjectRow[]) {
    report.novelPromotionProject.scanned += 1
    // Fall back to an empty mapping when the owner had no preference row.
    const mappingByModelId = userMappings.get(row.project.userId) || new Map<string, string[]>()
    const updateData: Partial<Record<ModelField, string | null>> = {}
    for (const field of MODEL_FIELDS) {
      const normalizedField = normalizeModelFieldValue(row[field], field, mappingByModelId)
      if (normalizedField.issue) {
        addIssue(report, {
          table: 'novelPromotionProject',
          rowId: row.id,
          userId: row.project.userId,
          ...normalizedField.issue,
        })
      }
      if (normalizedField.changed) {
        updateData[field] = normalizedField.nextValue
        report.novelPromotionProject.updatedFields += 1
      }
    }
    if (Object.keys(updateData).length > 0) {
      report.novelPromotionProject.updated += 1
      if (APPLY) {
        await prisma.novelPromotionProject.update({
          where: { id: row.id },
          data: updateData,
        })
      }
    }
  }
  // Persist the report (creating parent directories) and print a one-line
  // summary to stdout.
  const absoluteReportPath = path.isAbsolute(reportPath)
    ? reportPath
    : path.join(process.cwd(), reportPath)
  fs.mkdirSync(path.dirname(absoluteReportPath), { recursive: true })
  fs.writeFileSync(absoluteReportPath, `${JSON.stringify(report, null, 2)}\n`, 'utf8')
  process.stdout.write(
    `[migrate-model-config-contract] mode=${report.mode} ` +
    `prefs=${report.userPreference.scanned}/${report.userPreference.updated} ` +
    `projects=${report.novelPromotionProject.scanned}/${report.novelPromotionProject.updated} ` +
    `issues=${report.issues.length} report=${absoluteReportPath}\n`,
  )
}
// Runner: record failures on stderr with a non-zero exit code; always
// disconnect Prisma afterwards.
main()
  .catch((error: unknown) => {
    process.stderr.write(`[migrate-model-config-contract] failed: ${String(error)}\n`)
    process.exitCode = 1
  })
  .finally(() => prisma.$disconnect())
@@ -0,0 +1,351 @@
import { prisma } from '@/lib/prisma'
import { composeModelKey, parseModelKeyStrict, type CapabilitySelections } from '@/lib/model-config-contract'
// --apply writes changes back to the DB; the default is a read-only dry run.
const APPLY = process.argv.includes('--apply')
// Projection of the userPreference table read by this script.
type PreferenceRow = {
  id: string
  userId: string
  customProviders: string | null
  customModels: string | null
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
  lipSyncModel: string | null
  capabilityDefaults: string | null
}
// Validated shape of one entry in the customProviders JSON column.
type StoredProvider = {
  id: string
  name: string
  baseUrl?: string
  apiKey?: string
  apiMode?: 'gemini-sdk' | 'openai-official'
  gatewayRoute?: 'official' | 'litellm'
}
// Validated shape of one entry in the customModels JSON column.
type StoredModel = {
  modelId: string
  modelKey: string
  name: string
  type: string
  provider: string
  price: number
}
// A row skipped because migrating it would be ambiguous or lossy.
type MigrationConflict = {
  userId: string
  reason: string
}
// Counters + conflicts printed as the run's JSON summary.
type MigrationSummary = {
  mode: 'dry-run' | 'apply'
  scanned: number
  updatedRows: number
  updatedProviders: number
  updatedModels: number
  updatedDefaults: number
  updatedCapabilityDefaults: number
  invalidRows: number
  conflicts: MigrationConflict[]
}
// Default-model columns migrated on each preference row.
type DefaultModelField =
  | 'analysisModel'
  | 'characterModel'
  | 'locationModel'
  | 'storyboardModel'
  | 'editModel'
  | 'videoModel'
  | 'lipSyncModel'
const DEFAULT_MODEL_FIELDS: readonly DefaultModelField[] = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
  'lipSyncModel',
]
// Plain-object guard: rejects null, arrays, and primitives.
function isRecord(value: unknown): value is Record<string, unknown> {
  return value !== null && !Array.isArray(value) && typeof value === 'object'
}

// Return the trimmed string for string inputs, '' for everything else.
function readTrimmedString(value: unknown): string {
  if (typeof value === 'string') {
    return value.trim()
  }
  return ''
}

// Parse the customProviders JSON column into validated StoredProvider
// records. Returns [] for an empty column and null when the payload is
// malformed (non-array, non-object item, or missing id/name). Optional
// fields are copied only when present and valid.
function parseProviders(raw: string | null): StoredProvider[] | null {
  if (!raw) return []
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return null
  }
  if (!Array.isArray(parsed)) return null
  const providers: StoredProvider[] = []
  for (const item of parsed) {
    if (!isRecord(item)) return null
    const id = readTrimmedString(item.id)
    const name = readTrimmedString(item.name)
    if (!id || !name) return null
    const provider: StoredProvider = { id, name }
    const baseUrl = readTrimmedString(item.baseUrl)
    if (baseUrl) provider.baseUrl = baseUrl
    const apiKey = readTrimmedString(item.apiKey)
    if (apiKey) provider.apiKey = apiKey
    if (item.apiMode === 'gemini-sdk' || item.apiMode === 'openai-official') {
      provider.apiMode = item.apiMode
    }
    if (item.gatewayRoute === 'official' || item.gatewayRoute === 'litellm') {
      provider.gatewayRoute = item.gatewayRoute
    }
    providers.push(provider)
  }
  return providers
}
// Parse the customModels JSON column into validated StoredModel records.
// Returns [] for an empty column and null when the payload is malformed.
// A missing modelKey is derived via composeModelKey(provider, modelId).
function parseModels(raw: string | null): StoredModel[] | null {
  if (!raw) return []
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return null
  }
  if (!Array.isArray(parsed)) return null
  const models: StoredModel[] = []
  for (const item of parsed) {
    if (!isRecord(item)) return null
    const modelId = readTrimmedString(item.modelId)
    const provider = readTrimmedString(item.provider)
    const type = readTrimmedString(item.type)
    if (!modelId || !provider || !type) return null
    const storedKey = readTrimmedString(item.modelKey)
    const normalizedModelKey = storedKey || composeModelKey(provider, modelId)
    if (!normalizedModelKey) return null
    const name = readTrimmedString(item.name)
    const price = typeof item.price === 'number' && Number.isFinite(item.price) ? item.price : 0
    models.push({
      modelId,
      modelKey: normalizedModelKey,
      provider,
      name: name || modelId,
      type,
      price,
    })
  }
  return models
}
// Parse the capabilityDefaults JSON column: { modelKey -> { field -> scalar } }.
// Non-object selections are skipped and non-scalar field values dropped.
// Returns {} for an empty column and null for corrupt / non-object JSON.
function parseCapabilityDefaults(raw: string | null): CapabilitySelections | null {
  if (!raw) return {}
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return null
  }
  if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) return null
  const selections: CapabilitySelections = {}
  for (const [modelKey, value] of Object.entries(parsed)) {
    if (typeof value !== 'object' || value === null || Array.isArray(value)) continue
    const nextSelection: Record<string, string | number | boolean> = {}
    for (const [field, option] of Object.entries(value)) {
      if (typeof option === 'string' || typeof option === 'number' || typeof option === 'boolean') {
        nextSelection[field] = option
      }
    }
    selections[modelKey] = nextSelection
  }
  return selections
}
// Rename the legacy 'qwen' provider id to 'bailian', preserving any
// ':suffix'. Ids that already parse as strict model keys are left untouched.
function migrateProviderId(providerId: string): string {
  if (providerId === 'qwen') return 'bailian'
  if (parseModelKeyStrict(providerId)) return providerId
  const marker = providerId.indexOf(':')
  const isQwenPrefixed = marker !== -1 && providerId.slice(0, marker) === 'qwen'
  return isQwenPrefixed ? `bailian${providerId.slice(marker)}` : providerId
}
// Rewrite strict model keys under the 'qwen' provider to 'bailian'; leave
// everything else (including unparseable keys) unchanged.
function migrateModelKey(rawModelKey: string): string {
  const parsed = parseModelKeyStrict(rawModelKey)
  if (!parsed) return rawModelKey
  return parsed.provider === 'qwen'
    ? composeModelKey('bailian', parsed.modelId)
    : parsed.modelKey
}
// Migrate a default-model column: null/'' pass through, whitespace-only
// values collapse to null, everything else gets the key migration.
function migrateDefaultModel(rawValue: string | null): string | null {
  if (!rawValue) return rawValue
  const trimmed = rawValue.trim()
  return trimmed ? migrateModelKey(trimmed) : null
}
// True when any provider's id — up to an optional ':suffix' — equals the
// given provider key.
function hasProviderByKey(providers: StoredProvider[], providerKey: string): boolean {
  for (const provider of providers) {
    const marker = provider.id.indexOf(':')
    const key = marker === -1 ? provider.id : provider.id.slice(0, marker)
    if (key === providerKey) return true
  }
  return false
}
// Per-row qwen -> bailian migration over userPreference. Rows are skipped
// (never partially migrated) when: any JSON column fails to parse
// (invalidRows), both qwen and bailian providers already exist, or the
// rename would collide two model keys — the latter two are reported as
// conflicts and make the process exit with code 2.
async function main() {
  const summary: MigrationSummary = {
    mode: APPLY ? 'apply' : 'dry-run',
    scanned: 0,
    updatedRows: 0,
    updatedProviders: 0,
    updatedModels: 0,
    updatedDefaults: 0,
    updatedCapabilityDefaults: 0,
    invalidRows: 0,
    conflicts: [],
  }
  const rows = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      customProviders: true,
      customModels: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
      lipSyncModel: true,
      capabilityDefaults: true,
    },
  }) as PreferenceRow[]
  summary.scanned = rows.length
  for (const row of rows) {
    const providers = parseProviders(row.customProviders)
    const models = parseModels(row.customModels)
    const capabilityDefaults = parseCapabilityDefaults(row.capabilityDefaults)
    if (!providers || !models || !capabilityDefaults) {
      // One or more JSON columns are corrupt; do not touch this row.
      summary.invalidRows += 1
      continue
    }
    const hasQwenProvider = hasProviderByKey(providers, 'qwen')
    const hasBailianProvider = hasProviderByKey(providers, 'bailian')
    if (hasQwenProvider && hasBailianProvider) {
      // Renaming qwen would clash with the existing bailian entry.
      summary.conflicts.push({
        userId: row.userId,
        reason: 'both qwen and bailian providers exist',
      })
      continue
    }
    let rowChanged = false
    const nextProviders = providers.map((provider) => {
      const nextId = migrateProviderId(provider.id)
      if (nextId !== provider.id) {
        rowChanged = true
        summary.updatedProviders += 1
      }
      return {
        ...provider,
        id: nextId,
        // The renamed base provider also gets its display name refreshed.
        ...(nextId === 'bailian' ? { name: 'Alibaba Bailian' } : {}),
      }
    })
    const nextModels = models.map((model) => {
      const nextProvider = migrateProviderId(model.provider)
      const nextModelKey = migrateModelKey(model.modelKey)
      const changed = nextProvider !== model.provider || nextModelKey !== model.modelKey
      if (changed) {
        rowChanged = true
        summary.updatedModels += 1
      }
      return {
        ...model,
        provider: nextProvider,
        modelKey: nextModelKey,
      }
    })
    // Detect model-key collisions introduced by the rename.
    const modelKeySet = new Set<string>()
    let hasModelConflict = false
    for (const model of nextModels) {
      if (!modelKeySet.has(model.modelKey)) {
        modelKeySet.add(model.modelKey)
        continue
      }
      hasModelConflict = true
      break
    }
    if (hasModelConflict) {
      summary.conflicts.push({
        userId: row.userId,
        reason: 'model key collision after qwen -> bailian migration',
      })
      continue
    }
    const nextDefaults: Partial<Record<DefaultModelField, string | null>> = {}
    for (const field of DEFAULT_MODEL_FIELDS) {
      const current = row[field]
      const next = migrateDefaultModel(current)
      nextDefaults[field] = next
      // Normalize ''/null before comparing so blanking counts as a change.
      if ((current || null) !== (next || null)) {
        rowChanged = true
        summary.updatedDefaults += 1
      }
    }
    // Re-key capability defaults under the migrated model keys.
    const nextCapabilityDefaults: CapabilitySelections = {}
    for (const [modelKey, selection] of Object.entries(capabilityDefaults)) {
      const nextModelKey = migrateModelKey(modelKey)
      nextCapabilityDefaults[nextModelKey] = selection
      if (nextModelKey !== modelKey) {
        rowChanged = true
        summary.updatedCapabilityDefaults += 1
      }
    }
    if (!rowChanged) continue
    summary.updatedRows += 1
    if (APPLY) {
      await prisma.userPreference.update({
        where: { id: row.id },
        data: {
          customProviders: JSON.stringify(nextProviders),
          customModels: JSON.stringify(nextModels),
          analysisModel: nextDefaults.analysisModel || null,
          characterModel: nextDefaults.characterModel || null,
          locationModel: nextDefaults.locationModel || null,
          storyboardModel: nextDefaults.storyboardModel || null,
          editModel: nextDefaults.editModel || null,
          videoModel: nextDefaults.videoModel || null,
          lipSyncModel: nextDefaults.lipSyncModel || null,
          capabilityDefaults: Object.keys(nextCapabilityDefaults).length > 0
            ? JSON.stringify(nextCapabilityDefaults)
            : null,
        },
      })
    }
  }
  console.log(JSON.stringify(summary, null, 2))
  if (summary.conflicts.length > 0) {
    // Non-zero (but distinct from hard failure) exit when rows were skipped.
    process.exitCode = 2
  }
}
// Runner: disconnect Prisma on success; log, disconnect, and exit(1) on
// failure.
main()
  .then(() => prisma.$disconnect())
  .catch(async (error: unknown) => {
    console.error('[migrate-qwen-to-bailian] failed', error)
    await prisma.$disconnect()
    process.exit(1)
  })
@@ -0,0 +1,868 @@
import { prisma } from '@/lib/prisma'
import { composeModelKey, parseModelKeyStrict, type CapabilitySelections } from '@/lib/model-config-contract'
// Execution mode for this migration script: report-only vs. write.
type Mode = 'dry-run' | 'apply'
// Subset of the Prisma userPreference row read by this migration. The *Model
// columns hold a single "provider:modelId" key (or null); customProviders /
// customModels / capabilityDefaults hold JSON-encoded strings (or null).
type UserPreferenceRow = {
  id: string
  userId: string
  customProviders: string | null
  customModels: string | null
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
  audioModel: string | null
  lipSyncModel: string | null
  capabilityDefaults: string | null
}
// Subset of the novelPromotionProject row: per-project model overrides plus
// a JSON blob of capability overrides.
type NovelProjectRow = {
  id: string
  projectId: string
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
  capabilityOverrides: string | null
}
// Provider entry as persisted inside the customProviders JSON array.
type StoredProvider = {
  id: string
  name: string
  baseUrl?: string
  apiKey?: string
  apiMode?: 'gemini-sdk' | 'openai-official'
  gatewayRoute?: 'official' | 'openai-compat'
}
// Model entry as persisted inside the customModels JSON array. Extra keys
// beyond the three named ones are preserved verbatim during migration.
type StoredModel = {
  modelId: string
  modelKey: string
  provider: string
  [key: string]: unknown
}
// Parse outcome for stored JSON columns; ok=false marks dirty data that the
// caller clears to null.
type ParseResult<T> = {
  ok: boolean
  value: T
}
// Aggregated counters printed at the end as the migration report.
type MigrationSummary = {
  mode: Mode
  userPreference: {
    scanned: number
    updated: number
    dirtyClearedProviders: number
    dirtyClearedModels: number
    dirtyClearedCapabilityDefaults: number
    migratedProviders: number
    migratedModels: number
    migratedDefaultModelFields: number
    migratedCapabilityDefaultKeys: number
    modelCollisionsResolvedByBailian: number
    providerCollisionsResolvedByBailian: number
    invalidModelFieldsCleared: number
  }
  novelPromotionProject: {
    scanned: number
    updated: number
    migratedModelFields: number
    migratedCapabilityOverrideKeys: number
    invalidModelFieldsCleared: number
    dirtyClearedCapabilityOverrides: number
  }
  graphArtifacts: {
    hasRequiredUniqueIndexBefore: boolean
    duplicateGroupsBefore: number
    duplicateGroupSamples: Array<{
      runId: string
      stepKey: string
      artifactType: string
      refId: string
      count: number
    }>
    deletedRowsForDedup: number
    duplicateGroupsAfter: number
    indexAdded: boolean
    hasRequiredUniqueIndexAfter: boolean
  }
}
// Row shape returned by MySQL `SHOW INDEX`; numeric columns may arrive as
// strings depending on the driver, hence number | string.
type MysqlIndexRow = {
  Key_name: string
  Non_unique: number | string
  Seq_in_index: number | string
  Column_name: string
}
// One duplicate (runId, stepKey, artifactType, refId) group; c is the row count.
type DuplicateGroupRow = {
  runId: string
  stepKey: string
  artifactType: string
  refId: string
  c: bigint | number
}
// Single-value COUNT(*) result row.
type CountRow = {
  c: bigint | number
}
// userPreference columns that each hold a single model key.
type DefaultModelField =
  | 'analysisModel'
  | 'characterModel'
  | 'locationModel'
  | 'storyboardModel'
  | 'editModel'
  | 'videoModel'
  | 'audioModel'
  | 'lipSyncModel'
// novelPromotionProject columns that each hold a single model key.
type ProjectModelField =
  | 'analysisModel'
  | 'characterModel'
  | 'locationModel'
  | 'storyboardModel'
  | 'editModel'
  | 'videoModel'
// Partial update payloads: only changed columns are written back.
type UserPreferenceUpdateData = Partial<Record<DefaultModelField, string | null>> & {
  customProviders?: string | null
  customModels?: string | null
  capabilityDefaults?: string | null
}
type NovelProjectUpdateData = Partial<Record<ProjectModelField, string | null>> & {
  capabilityOverrides?: string | null
}
// NOTE(review): the default mode is 'apply' — this script WRITES unless
// --dry-run is passed. Sibling maintenance scripts in this repo default to
// dry-run and require an explicit --apply; confirm this inversion is intended.
const MODE: Mode = process.argv.includes('--dry-run') ? 'dry-run' : 'apply'
const APPLY = MODE === 'apply'
// Provider keys that are forced onto the 'official' gateway route.
const OFFICIAL_ONLY_PROVIDER_KEYS = new Set(['bailian', 'siliconflow'])
// All userPreference single-model columns to migrate.
const DEFAULT_MODEL_FIELDS: readonly DefaultModelField[] = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
  'audioModel',
  'lipSyncModel',
]
// All novelPromotionProject single-model columns to migrate.
const PROJECT_MODEL_FIELDS: readonly ProjectModelField[] = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
]
// Exact column order the graph_artifacts unique index must have.
const REQUIRED_GRAPH_ARTIFACT_UNIQUE_COLUMNS = ['runId', 'stepKey', 'artifactType', 'refId'] as const
/** True when `value` is a non-null, non-array object (usable as a string-keyed record). */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || value === undefined) return false
  if (typeof value !== 'object') return false
  return !Array.isArray(value)
}
/** Trimmed string form of `value`; any non-string collapses to ''. */
function readTrimmedString(value: unknown): string {
  if (typeof value !== 'string') return ''
  return value.trim()
}
/** Normalize a model-key column value: trimmed non-empty string, else null. */
function toNullableModelField(raw: string | null | undefined): string | null {
  const normalized = readTrimmedString(raw)
  return normalized.length > 0 ? normalized : null
}
/** Base provider key: everything before the first ':' (whole id when there is none). */
function getProviderKey(providerId: string): string {
  const separator = providerId.indexOf(':')
  if (separator < 0) return providerId
  return providerId.substring(0, separator)
}
function migrateProviderId(providerId: string): string {
const trimmed = providerId.trim()
if (!trimmed) return trimmed
if (trimmed === 'qwen') return 'bailian'
const providerKey = getProviderKey(trimmed)
if (providerKey !== 'qwen') return trimmed
return `bailian${trimmed.slice(providerKey.length)}`
}
function migrateModelKey(rawModelKey: string): string {
const parsed = parseModelKeyStrict(rawModelKey)
if (!parsed) return rawModelKey
if (getProviderKey(parsed.provider) !== 'qwen') return parsed.modelKey
const nextProvider = migrateProviderId(parsed.provider)
return composeModelKey(nextProvider, parsed.modelId)
}
/**
 * Collision priority by ORIGINAL (pre-migration) provider key: entries that
 * were already 'bailian' beat migrated 'qwen' entries, which beat the rest.
 */
function providerPriorityByOriginalKey(originalProviderId: string): number {
  switch (getProviderKey(originalProviderId)) {
    case 'bailian':
      return 2
    case 'qwen':
      return 1
    default:
      return 0
  }
}
/**
 * Decide the gateway route for a provider: compat providers have a fixed
 * route, official-only providers are forced to 'official', and anything else
 * honors a stored 'openai-compat' value, defaulting to 'official'.
 */
function normalizeGatewayRoute(
  providerId: string,
  rawGatewayRoute: unknown,
): 'official' | 'openai-compat' {
  const providerKey = getProviderKey(providerId)
  switch (providerKey) {
    case 'openai-compatible':
      return 'openai-compat'
    case 'gemini-compatible':
      return 'official'
    default:
      break
  }
  if (OFFICIAL_ONLY_PROVIDER_KEYS.has(providerKey)) return 'official'
  return rawGatewayRoute === 'openai-compat' ? 'openai-compat' : 'official'
}
/**
 * Parse a JSON column expected to hold an array. Empty/null input is a
 * valid empty list; malformed JSON or a non-array payload is flagged dirty.
 */
function parseJsonArray(raw: string | null): ParseResult<unknown[]> {
  if (raw === null || raw === '') return { ok: true, value: [] }
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return { ok: false, value: [] }
  }
  return Array.isArray(parsed) ? { ok: true, value: parsed } : { ok: false, value: [] }
}
/**
 * Parse a JSON column expected to hold a plain object. Empty/null input is a
 * valid empty record; malformed JSON or a non-record payload is flagged dirty.
 */
function parseJsonRecord(raw: string | null): ParseResult<Record<string, unknown>> {
  if (!raw) return { ok: true, value: {} }
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return { ok: false, value: {} }
  }
  if (isRecord(parsed)) return { ok: true, value: parsed }
  return { ok: false, value: {} }
}
/**
 * Migrate the customProviders JSON array: rename provider ids qwen→bailian,
 * normalize apiMode/gatewayRoute, and dedupe by lower-cased id, letting
 * entries that were originally 'bailian' win collisions.
 *
 * Returns ok=false (nextRaw null) when the stored JSON is dirty — not an
 * array, a non-object entry, or an entry missing id/name — so the caller
 * clears the column instead of carrying broken data forward.
 */
function migrateProviders(
  rawProviders: string | null,
): {
  ok: boolean
  nextRaw: string | null
  changed: boolean
  migratedProviders: number
  collisionsResolvedByBailian: number
} {
  const parsed = parseJsonArray(rawProviders)
  if (!parsed.ok) {
    return {
      ok: false,
      nextRaw: null,
      changed: rawProviders !== null,
      migratedProviders: 0,
      collisionsResolvedByBailian: 0,
    }
  }
  // Lower-cased provider id -> best entry seen so far plus its source priority.
  const deduped = new Map<string, { provider: StoredProvider; priority: number }>()
  let migratedProviders = 0
  let collisionsResolvedByBailian = 0
  for (const item of parsed.value) {
    if (!isRecord(item)) {
      return {
        ok: false,
        nextRaw: null,
        changed: true,
        migratedProviders: 0,
        collisionsResolvedByBailian: 0,
      }
    }
    const id = readTrimmedString(item.id)
    const name = readTrimmedString(item.name)
    if (!id || !name) {
      return {
        ok: false,
        nextRaw: null,
        changed: true,
        migratedProviders: 0,
        collisionsResolvedByBailian: 0,
      }
    }
    const nextId = migrateProviderId(id)
    if (nextId !== id) migratedProviders += 1
    // Keep apiMode only when it is one of the two known values.
    const apiModeRaw = readTrimmedString(item.apiMode)
    let apiMode: 'gemini-sdk' | 'openai-official' | undefined
    if (apiModeRaw === 'gemini-sdk' || apiModeRaw === 'openai-official') {
      apiMode = apiModeRaw
    }
    // gemini-compatible providers cannot use the openai-official mode.
    if (getProviderKey(nextId) === 'gemini-compatible' && apiMode === 'openai-official') {
      apiMode = 'gemini-sdk'
    }
    const nextProvider: StoredProvider = {
      id: nextId,
      name: getProviderKey(nextId) === 'bailian' ? 'Alibaba Bailian' : name,
      baseUrl: readTrimmedString(item.baseUrl) || undefined,
      apiKey: typeof item.apiKey === 'string' ? item.apiKey.trim() : undefined,
      apiMode,
      gatewayRoute: normalizeGatewayRoute(nextId, item.gatewayRoute),
    }
    const dedupeKey = nextProvider.id.toLowerCase()
    // Priority comes from the ORIGINAL id, so pre-existing bailian entries
    // beat qwen entries that were just renamed onto the same id.
    const nextPriority = providerPriorityByOriginalKey(id)
    const existing = deduped.get(dedupeKey)
    if (!existing) {
      deduped.set(dedupeKey, { provider: nextProvider, priority: nextPriority })
      continue
    }
    if (nextPriority > existing.priority) {
      deduped.set(dedupeKey, { provider: nextProvider, priority: nextPriority })
      collisionsResolvedByBailian += 1
    }
  }
  const nextProviders = Array.from(deduped.values()).map((entry) => entry.provider)
  // An empty list is stored as NULL, matching the pristine-column convention.
  const nextRaw = nextProviders.length > 0 ? JSON.stringify(nextProviders) : null
  return {
    ok: true,
    nextRaw,
    changed: (rawProviders || null) !== (nextRaw || null),
    migratedProviders,
    collisionsResolvedByBailian,
  }
}
/**
 * Migrate the customModels JSON array: rewrite provider ids qwen→bailian,
 * recompose each modelKey from (provider, modelId), and dedupe by lower-cased
 * model key, letting entries that originally came from 'bailian' win
 * collisions. Extra keys on each entry are carried through unchanged.
 *
 * Returns ok=false (nextRaw null) for dirty JSON — not an array, a non-object
 * entry, an entry whose provider/modelId cannot be determined, or a key that
 * fails to recompose — so the caller clears the column.
 */
function migrateModels(
  rawModels: string | null,
): {
  ok: boolean
  nextRaw: string | null
  changed: boolean
  migratedModels: number
  collisionsResolvedByBailian: number
} {
  const parsed = parseJsonArray(rawModels)
  if (!parsed.ok) {
    return {
      ok: false,
      nextRaw: null,
      changed: rawModels !== null,
      migratedModels: 0,
      collisionsResolvedByBailian: 0,
    }
  }
  // Lower-cased model key -> best entry seen so far plus its source priority.
  const deduped = new Map<string, { model: StoredModel; priority: number }>()
  let migratedModels = 0
  let collisionsResolvedByBailian = 0
  for (const item of parsed.value) {
    if (!isRecord(item)) {
      return {
        ok: false,
        nextRaw: null,
        changed: true,
        migratedModels: 0,
        collisionsResolvedByBailian: 0,
      }
    }
    // Prefer the explicit provider/modelId fields, falling back to whatever
    // the stored modelKey parses to.
    const providerRaw = readTrimmedString(item.provider)
    const modelIdRaw = readTrimmedString(item.modelId)
    const modelKeyRaw = readTrimmedString(item.modelKey)
    const parsedModelKey = parseModelKeyStrict(modelKeyRaw)
    const sourceProvider = providerRaw || parsedModelKey?.provider || ''
    const sourceModelId = modelIdRaw || parsedModelKey?.modelId || ''
    if (!sourceProvider || !sourceModelId) {
      return {
        ok: false,
        nextRaw: null,
        changed: true,
        migratedModels: 0,
        collisionsResolvedByBailian: 0,
      }
    }
    const nextProvider = migrateProviderId(sourceProvider)
    const nextModelKey = composeModelKey(nextProvider, sourceModelId)
    if (!nextModelKey) {
      return {
        ok: false,
        nextRaw: null,
        changed: true,
        migratedModels: 0,
        collisionsResolvedByBailian: 0,
      }
    }
    if (nextProvider !== sourceProvider || nextModelKey !== modelKeyRaw) migratedModels += 1
    const nextModel: StoredModel = {
      ...item,
      provider: nextProvider,
      modelId: sourceModelId,
      modelKey: nextModelKey,
    }
    const dedupeKey = nextModelKey.toLowerCase()
    // Priority comes from the ORIGINAL provider, so pre-existing bailian
    // entries beat renamed qwen entries landing on the same key.
    const nextPriority = providerPriorityByOriginalKey(sourceProvider)
    const existing = deduped.get(dedupeKey)
    if (!existing) {
      deduped.set(dedupeKey, { model: nextModel, priority: nextPriority })
      continue
    }
    if (nextPriority > existing.priority) {
      deduped.set(dedupeKey, { model: nextModel, priority: nextPriority })
      collisionsResolvedByBailian += 1
    }
  }
  const nextModels = Array.from(deduped.values()).map((entry) => entry.model)
  // An empty list is stored as NULL, matching the pristine-column convention.
  const nextRaw = nextModels.length > 0 ? JSON.stringify(nextModels) : null
  return {
    ok: true,
    nextRaw,
    changed: (rawModels || null) !== (nextRaw || null),
    migratedModels,
    collisionsResolvedByBailian,
  }
}
/**
 * Migrate one single-model column value ("provider:modelId" or null).
 * - blank / whitespace-only → null
 * - unparsable key → cleared, flagged clearedInvalid
 * - valid key → provider rewritten qwen→bailian and recomposed
 * `migrated` is true only when the provider actually changed.
 */
function migrateModelField(
  raw: string | null,
): {
  nextValue: string | null
  changed: boolean
  migrated: boolean
  clearedInvalid: boolean
} {
  const current = toNullableModelField(raw)
  if (!current) {
    // Already null, or only whitespace (whitespace counts as changed).
    return {
      nextValue: null,
      changed: current !== raw,
      migrated: false,
      clearedInvalid: false,
    }
  }
  const parsed = parseModelKeyStrict(current)
  if (!parsed) {
    // Key does not parse: clear it rather than keep an unusable value.
    return {
      nextValue: null,
      changed: true,
      migrated: false,
      clearedInvalid: true,
    }
  }
  const nextProvider = migrateProviderId(parsed.provider)
  const nextKey = composeModelKey(nextProvider, parsed.modelId)
  return {
    nextValue: nextKey || null,
    changed: (nextKey || null) !== (raw || null),
    migrated: parsed.provider !== nextProvider,
    clearedInvalid: false,
  }
}
/**
 * Migrate a capabilityDefaults/capabilityOverrides JSON record keyed by model
 * key. Keys are rewritten qwen→bailian and deduped, with selections that
 * already belonged to a 'bailian' key winning collisions. Selection values
 * must be scalars (string/number/boolean).
 *
 * Returns ok=false for dirty JSON — non-record payload, non-record selection,
 * unparsable model key, or non-scalar selection value — so the caller clears
 * the column.
 */
function migrateCapabilitySelections(
  raw: string | null,
): {
  ok: boolean
  nextRaw: string | null
  changed: boolean
  migratedKeys: number
} {
  const parsed = parseJsonRecord(raw)
  if (!parsed.ok) {
    return {
      ok: false,
      nextRaw: null,
      changed: raw !== null,
      migratedKeys: 0,
    }
  }
  const deduped: CapabilitySelections = {}
  // Tracks, per migrated key, the priority of the source that supplied it.
  const priorities = new Map<string, number>()
  let migratedKeys = 0
  for (const [modelKey, rawSelection] of Object.entries(parsed.value)) {
    if (!isRecord(rawSelection)) {
      return {
        ok: false,
        nextRaw: null,
        changed: raw !== null,
        migratedKeys: 0,
      }
    }
    const parsedKey = parseModelKeyStrict(modelKey)
    if (!parsedKey) {
      return {
        ok: false,
        nextRaw: null,
        changed: raw !== null,
        migratedKeys: 0,
      }
    }
    const nextKey = migrateModelKey(parsedKey.modelKey)
    if (nextKey !== parsedKey.modelKey) migratedKeys += 1
    // Copy the selection, rejecting anything that is not a plain scalar.
    const nextSelection: Record<string, string | number | boolean> = {}
    for (const [field, value] of Object.entries(rawSelection)) {
      if (typeof value !== 'string' && typeof value !== 'number' && typeof value !== 'boolean') {
        return {
          ok: false,
          nextRaw: null,
          changed: raw !== null,
          migratedKeys: 0,
        }
      }
      nextSelection[field] = value
    }
    // Higher-priority source (originally-bailian key) wins key collisions.
    const sourcePriority = providerPriorityByOriginalKey(parsedKey.provider)
    const existingPriority = priorities.get(nextKey)
    if (existingPriority === undefined || sourcePriority > existingPriority) {
      deduped[nextKey] = nextSelection
      priorities.set(nextKey, sourcePriority)
    }
  }
  // An empty record is stored as NULL, matching the pristine-column convention.
  const nextRaw = Object.keys(deduped).length > 0 ? JSON.stringify(deduped) : null
  return {
    ok: true,
    nextRaw,
    changed: (raw || null) !== (nextRaw || null),
    migratedKeys,
  }
}
/** Coerce a SHOW INDEX numeric column (which may arrive as a string) to a number. */
function toIndexNumber(value: number | string): number {
  return typeof value === 'string' ? Number.parseInt(value, 10) : value
}
/**
 * Inspect `SHOW INDEX` rows and report whether some UNIQUE index covers
 * exactly (runId, stepKey, artifactType, refId) in that column order.
 */
function hasRequiredGraphArtifactUniqueIndex(rows: MysqlIndexRow[]): boolean {
  // Bucket the per-column rows by index name.
  const byIndexName = new Map<string, Array<{ seq: number; column: string; nonUnique: number }>>()
  for (const row of rows) {
    const seq = toIndexNumber(row.Seq_in_index)
    const nonUnique = toIndexNumber(row.Non_unique)
    // Skip rows whose numeric columns failed to parse.
    if (!Number.isFinite(seq) || !Number.isFinite(nonUnique)) continue
    const bucket = byIndexName.get(row.Key_name)
    if (bucket) {
      bucket.push({ seq, column: row.Column_name, nonUnique })
    } else {
      byIndexName.set(row.Key_name, [{ seq, column: row.Column_name, nonUnique }])
    }
  }
  for (const parts of byIndexName.values()) {
    // Must cover exactly the required columns, be unique, and match order.
    if (parts.length !== REQUIRED_GRAPH_ARTIFACT_UNIQUE_COLUMNS.length) continue
    parts.sort((a, b) => a.seq - b.seq)
    if (parts[0]?.nonUnique !== 0) continue
    const matches = parts.every(
      (part, position) => part.column === REQUIRED_GRAPH_ARTIFACT_UNIQUE_COLUMNS[position],
    )
    if (matches) return true
  }
  return false
}
/** Collapse a driver-returned COUNT value (bigint or number) into a plain number. */
function toNumber(value: bigint | number): number {
  return typeof value === 'number' ? value : Number(value)
}
/** Fetch the raw MySQL `SHOW INDEX` rows for the graph_artifacts table. */
async function loadGraphArtifactIndexes(): Promise<MysqlIndexRow[]> {
  return await prisma.$queryRawUnsafe<MysqlIndexRow[]>('SHOW INDEX FROM graph_artifacts')
}
/**
 * Count (runId, stepKey, artifactType, refId) groups with more than one row —
 * i.e. violations of the unique index this migration wants to add. Rows with
 * NULL stepKey are outside the constraint and excluded.
 */
async function countGraphArtifactDuplicateGroups(): Promise<number> {
  const rows = await prisma.$queryRawUnsafe<CountRow[]>(
    `SELECT COUNT(*) AS c
    FROM (
      SELECT 1
      FROM graph_artifacts
      WHERE stepKey IS NOT NULL
      GROUP BY runId, stepKey, artifactType, refId
      HAVING COUNT(*) > 1
    ) duplicate_groups`,
  )
  // COUNT(*) may come back as bigint depending on the driver.
  return rows.length > 0 ? toNumber(rows[0].c) : 0
}
/**
 * Fetch up to `limit` sample duplicate (runId, stepKey, artifactType, refId)
 * groups so the dry-run report can show concrete offenders.
 *
 * @param limit Maximum number of sample rows. Coerced to a positive integer
 *   before use: the value is interpolated directly into the raw SQL text
 *   (LIMIT cannot be parameterized here), so it must never be an arbitrary
 *   string/float — this guard removes the injection/typo hazard.
 */
async function sampleGraphArtifactDuplicateGroups(limit: number): Promise<DuplicateGroupRow[]> {
  const safeLimit = Number.isFinite(limit) ? Math.max(1, Math.floor(limit)) : 1
  return await prisma.$queryRawUnsafe<DuplicateGroupRow[]>(
    `SELECT runId, stepKey, artifactType, refId, COUNT(*) AS c
    FROM graph_artifacts
    WHERE stepKey IS NOT NULL
    GROUP BY runId, stepKey, artifactType, refId
    HAVING c > 1
    LIMIT ${safeLimit}`,
  )
}
/**
 * Delete all but one row from every duplicate group, keeping the newest row
 * (latest createdAt; ties broken by highest id). Rows with NULL stepKey are
 * untouched. Returns the number of rows deleted.
 */
async function dedupeGraphArtifacts(): Promise<number> {
  // Self-join: ga1 is deleted when a "better" sibling ga2 exists in the
  // same (runId, stepKey, artifactType, refId) group.
  return await prisma.$executeRawUnsafe(
    `DELETE ga1 FROM graph_artifacts ga1
    JOIN graph_artifacts ga2
      ON ga1.runId = ga2.runId
      AND ga1.stepKey = ga2.stepKey
      AND ga1.artifactType = ga2.artifactType
      AND ga1.refId = ga2.refId
      AND (
        ga1.createdAt < ga2.createdAt
        OR (ga1.createdAt = ga2.createdAt AND ga1.id < ga2.id)
      )
    WHERE ga1.stepKey IS NOT NULL`,
  )
}
/**
 * Add the unique index on (runId, stepKey, artifactType, refId). Must only be
 * called after dedupe has succeeded, or MySQL will reject the ALTER.
 */
async function addGraphArtifactUniqueIndex(): Promise<void> {
  await prisma.$executeRawUnsafe(
    'ALTER TABLE graph_artifacts ADD UNIQUE INDEX graph_artifacts_runId_stepKey_artifactType_refId_key (runId, stepKey, artifactType, refId)',
  )
}
/**
 * Scan every userPreference row and migrate its provider/model JSON blobs,
 * each single-model default column, and the capability defaults. Dirty
 * (unparsable) JSON columns are cleared to null. Counters in `summary` are
 * updated in both modes; rows are written only when APPLY is set.
 */
async function migrateUserPreferences(summary: MigrationSummary): Promise<void> {
  const rows = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      customProviders: true,
      customModels: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
      audioModel: true,
      lipSyncModel: true,
      capabilityDefaults: true,
    },
  }) as UserPreferenceRow[]
  summary.userPreference.scanned = rows.length
  for (const row of rows) {
    // Accumulate only the columns that actually change for this row.
    const updateData: UserPreferenceUpdateData = {}
    let changed = false
    const providerResult = migrateProviders(row.customProviders)
    if (!providerResult.ok) {
      // Dirty JSON: clear rather than carry broken data forward.
      updateData.customProviders = null
      changed = changed || row.customProviders !== null
      summary.userPreference.dirtyClearedProviders += 1
    } else if (providerResult.changed) {
      updateData.customProviders = providerResult.nextRaw
      changed = true
      summary.userPreference.migratedProviders += providerResult.migratedProviders
      summary.userPreference.providerCollisionsResolvedByBailian += providerResult.collisionsResolvedByBailian
    }
    const modelResult = migrateModels(row.customModels)
    if (!modelResult.ok) {
      updateData.customModels = null
      changed = changed || row.customModels !== null
      summary.userPreference.dirtyClearedModels += 1
    } else if (modelResult.changed) {
      updateData.customModels = modelResult.nextRaw
      changed = true
      summary.userPreference.migratedModels += modelResult.migratedModels
      summary.userPreference.modelCollisionsResolvedByBailian += modelResult.collisionsResolvedByBailian
    }
    const capabilityResult = migrateCapabilitySelections(row.capabilityDefaults)
    if (!capabilityResult.ok) {
      updateData.capabilityDefaults = null
      changed = changed || row.capabilityDefaults !== null
      summary.userPreference.dirtyClearedCapabilityDefaults += 1
    } else if (capabilityResult.changed) {
      updateData.capabilityDefaults = capabilityResult.nextRaw
      changed = true
      summary.userPreference.migratedCapabilityDefaultKeys += capabilityResult.migratedKeys
    }
    // Per-column single model keys (analysisModel … lipSyncModel).
    for (const field of DEFAULT_MODEL_FIELDS) {
      const fieldResult = migrateModelField(row[field])
      if (!fieldResult.changed) continue
      updateData[field] = fieldResult.nextValue
      changed = true
      if (fieldResult.migrated) {
        summary.userPreference.migratedDefaultModelFields += 1
      }
      if (fieldResult.clearedInvalid) {
        summary.userPreference.invalidModelFieldsCleared += 1
      }
    }
    if (!changed) continue
    summary.userPreference.updated += 1
    // Persist only outside dry-run mode.
    if (APPLY) {
      await prisma.userPreference.update({
        where: { id: row.id },
        data: updateData,
      })
    }
  }
}
/**
 * Scan every novelPromotionProject row and migrate its per-project model
 * override columns and capability overrides, mirroring the userPreference
 * pass. Rows are written only when APPLY is set.
 */
async function migrateNovelProjects(summary: MigrationSummary): Promise<void> {
  const rows = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      projectId: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
      capabilityOverrides: true,
    },
  }) as NovelProjectRow[]
  summary.novelPromotionProject.scanned = rows.length
  for (const row of rows) {
    // Accumulate only the columns that actually change for this row.
    const updateData: NovelProjectUpdateData = {}
    let changed = false
    for (const field of PROJECT_MODEL_FIELDS) {
      const fieldResult = migrateModelField(row[field])
      if (!fieldResult.changed) continue
      updateData[field] = fieldResult.nextValue
      changed = true
      if (fieldResult.migrated) {
        summary.novelPromotionProject.migratedModelFields += 1
      }
      if (fieldResult.clearedInvalid) {
        summary.novelPromotionProject.invalidModelFieldsCleared += 1
      }
    }
    const capabilityResult = migrateCapabilitySelections(row.capabilityOverrides)
    if (!capabilityResult.ok) {
      // Dirty JSON: clear rather than carry broken data forward.
      updateData.capabilityOverrides = null
      changed = changed || row.capabilityOverrides !== null
      summary.novelPromotionProject.dirtyClearedCapabilityOverrides += 1
    } else if (capabilityResult.changed) {
      updateData.capabilityOverrides = capabilityResult.nextRaw
      changed = true
      summary.novelPromotionProject.migratedCapabilityOverrideKeys += capabilityResult.migratedKeys
    }
    if (!changed) continue
    summary.novelPromotionProject.updated += 1
    // Persist only outside dry-run mode.
    if (APPLY) {
      await prisma.novelPromotionProject.update({
        where: { id: row.id },
        data: updateData,
      })
    }
  }
}
/**
 * Ensure graph_artifacts has the required unique index:
 * 1. record the before state (index present? duplicate groups? samples),
 * 2. in apply mode, delete duplicate rows,
 * 3. in apply mode, add the unique index if missing — but abort if any
 *    duplicate groups survived dedupe (the ALTER would fail anyway),
 * 4. re-check and fail loudly if the index is still missing after apply.
 */
async function migrateGraphArtifacts(summary: MigrationSummary): Promise<void> {
  const beforeIndexes = await loadGraphArtifactIndexes()
  const hasRequiredBefore = hasRequiredGraphArtifactUniqueIndex(beforeIndexes)
  const duplicateGroupsBefore = await countGraphArtifactDuplicateGroups()
  const duplicateGroupSamples = await sampleGraphArtifactDuplicateGroups(20)
  summary.graphArtifacts.hasRequiredUniqueIndexBefore = hasRequiredBefore
  summary.graphArtifacts.duplicateGroupsBefore = duplicateGroupsBefore
  summary.graphArtifacts.duplicateGroupSamples = duplicateGroupSamples.map((row) => ({
    runId: row.runId,
    stepKey: row.stepKey,
    artifactType: row.artifactType,
    refId: row.refId,
    count: toNumber(row.c),
  }))
  if (APPLY && duplicateGroupsBefore > 0) {
    const deleted = await dedupeGraphArtifacts()
    summary.graphArtifacts.deletedRowsForDedup = deleted
  }
  // In dry-run mode nothing was deleted, so "after" equals "before".
  const duplicateGroupsAfter = APPLY ? await countGraphArtifactDuplicateGroups() : duplicateGroupsBefore
  summary.graphArtifacts.duplicateGroupsAfter = duplicateGroupsAfter
  if (APPLY && !hasRequiredBefore) {
    if (duplicateGroupsAfter > 0) {
      throw new Error(
        `GRAPH_ARTIFACT_DEDUPE_INCOMPLETE: still has ${duplicateGroupsAfter} duplicate groups, unique index not added`,
      )
    }
    await addGraphArtifactUniqueIndex()
    summary.graphArtifacts.indexAdded = true
  }
  // Verify the final state regardless of mode.
  const afterIndexes = await loadGraphArtifactIndexes()
  summary.graphArtifacts.hasRequiredUniqueIndexAfter = hasRequiredGraphArtifactUniqueIndex(afterIndexes)
  if (APPLY && !summary.graphArtifacts.hasRequiredUniqueIndexAfter) {
    throw new Error('GRAPH_ARTIFACT_UNIQUE_INDEX_MISSING_AFTER_MIGRATION')
  }
}
/**
 * Run the three migration passes (user preferences, novel projects,
 * graph artifacts) and print the accumulated summary as pretty JSON.
 */
async function main() {
  // All counters start at zero; the passes mutate this object in place.
  const summary: MigrationSummary = {
    mode: MODE,
    userPreference: {
      scanned: 0,
      updated: 0,
      dirtyClearedProviders: 0,
      dirtyClearedModels: 0,
      dirtyClearedCapabilityDefaults: 0,
      migratedProviders: 0,
      migratedModels: 0,
      migratedDefaultModelFields: 0,
      migratedCapabilityDefaultKeys: 0,
      modelCollisionsResolvedByBailian: 0,
      providerCollisionsResolvedByBailian: 0,
      invalidModelFieldsCleared: 0,
    },
    novelPromotionProject: {
      scanned: 0,
      updated: 0,
      migratedModelFields: 0,
      migratedCapabilityOverrideKeys: 0,
      invalidModelFieldsCleared: 0,
      dirtyClearedCapabilityOverrides: 0,
    },
    graphArtifacts: {
      hasRequiredUniqueIndexBefore: false,
      duplicateGroupsBefore: 0,
      duplicateGroupSamples: [],
      deletedRowsForDedup: 0,
      duplicateGroupsAfter: 0,
      indexAdded: false,
      hasRequiredUniqueIndexAfter: false,
    },
  }
  await migrateUserPreferences(summary)
  await migrateNovelProjects(summary)
  await migrateGraphArtifacts(summary)
  console.log(JSON.stringify(summary, null, 2))
}
// Entrypoint: run the migration, release the prisma connection on both
// paths, and exit non-zero on failure so callers/CI can detect it.
main()
  .then(async () => {
    await prisma.$disconnect()
  })
  .catch(async (error: unknown) => {
    console.error('[migrate-release-blockers] failed', error)
    await prisma.$disconnect()
    process.exit(1)
  })
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,16 @@
{
"generatedAt": "2026-02-12T12:53:18.381Z",
"mode": "apply",
"userPreference": {
"scanned": 7,
"updated": 4,
"updatedCustomModels": 0,
"updatedDefaultFields": 24
},
"novelPromotionProject": {
"scanned": 70,
"updated": 40,
"updatedFields": 106
},
"issues": []
}
@@ -0,0 +1,16 @@
{
"generatedAt": "2026-02-12T12:53:12.288Z",
"mode": "dry-run",
"userPreference": {
"scanned": 7,
"updated": 4,
"updatedCustomModels": 0,
"updatedDefaultFields": 24
},
"novelPromotionProject": {
"scanned": 70,
"updated": 40,
"updatedFields": 106
},
"issues": []
}
File diff suppressed because it is too large Load Diff
+53
View File
@@ -0,0 +1,53 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
/** Window size in minutes from a --minutes=N CLI flag; defaults to 5 when absent or invalid. */
function parseMinutesArg() {
  const flag = process.argv.find((arg) => arg.startsWith('--minutes='))
  if (!flag) return 5
  const parsed = Number.parseInt(flag.slice('--minutes='.length), 10)
  return Number.isFinite(parsed) && parsed > 0 ? parsed : 5
}
/**
 * Group tasks that failed within the last N minutes by errorCode and log a
 * tab-separated table of code / count / percentage, most frequent first.
 */
async function main() {
  const minutes = parseMinutesArg()
  const since = new Date(Date.now() - minutes * 60_000)
  const rows = await prisma.task.groupBy({
    by: ['errorCode'],
    where: {
      status: 'failed',
      finishedAt: { gte: since },
    },
    _count: {
      _all: true,
    },
    orderBy: {
      _count: {
        errorCode: 'desc',
      },
    },
  })
  const total = rows.reduce((sum: number, row) => sum + (row._count?._all || 0), 0)
  _ulogInfo(`[TaskErrorStats] window=${minutes}m failed_total=${total}`)
  if (!rows.length) {
    _ulogInfo('No failed tasks in the selected window.')
    return
  }
  for (const row of rows) {
    // Tasks that failed without an errorCode fall into the UNKNOWN bucket.
    const code = row.errorCode || 'UNKNOWN'
    const count = row?._count?._all || 0
    const ratio = total > 0 ? ((count / total) * 100).toFixed(1) : '0.0'
    _ulogInfo(`${code}\t${count}\t${ratio}%`)
  }
}
// Entrypoint: on failure log the error and mark a non-zero exit code, then
// always release the prisma connection.
//
// Fix: the previous version called process.exit(1) inside .catch, which
// terminates the process immediately — the .finally() disconnect never ran,
// leaving the Prisma connection open on the error path. Setting
// process.exitCode lets cleanup complete and the process still exits with 1.
main()
  .catch((error) => {
    _ulogError('[TaskErrorStats] failed:', error?.message || error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })
+161
View File
@@ -0,0 +1,161 @@
/**
* 模拟完整的图像生成和显示流程
* 运行: npx tsx scripts/test-full-image-flow.ts
*/
import { config } from 'dotenv'
config()
import { uploadObject, getStorageProvider } from '../src/lib/storage'
import { extractStorageKeyFromLegacyValue, resolveMediaRefFromLegacyValue, getMediaObjectByPublicId } from '../src/lib/media/service'
import { attachMediaFieldsToProject } from '../src/lib/media/attach'
import { randomUUID } from 'crypto'
/**
 * End-to-end simulation of the image generation → storage → display flow:
 * upload an object, mimic the DB storing its key, resolve the legacy value
 * into a MediaObject, run the API-layer media-attach transform, optionally
 * hit the public /m/ route on a local dev server, then clean up.
 */
async function testFullImageFlow() {
  console.log('🧪 模拟完整图像生成和显示流程...\n')
  const provider = getStorageProvider()
  console.log(`存储类型: ${provider.kind}\n`)
  // 1. Simulate the upload performed right after image generation.
  console.log('1️⃣ 模拟图像生成后上传:')
  const testKey = `images/location-${randomUUID()}.jpg`
  const testImageContent = Buffer.from('fake-generated-image-data')
  const storedKey = await uploadObject(testImageContent, testKey)
  console.log(` ✅ 上传成功,返回 key: ${storedKey}`)
  // 2. Simulate what the database stores (the key, not a full URL).
  console.log('\n2️⃣ 模拟数据库存储:')
  const mockDbLocation = {
    id: 'loc-test-123',
    name: '测试场景',
    images: [
      {
        id: 'img-1',
        imageUrl: storedKey, // the stored value is a key, not a full URL
        imageIndex: 0,
      }
    ]
  }
  console.log(` 存储的 imageUrl: ${storedKey}`)
  // 3. extractStorageKeyFromLegacyValue should recover the key.
  console.log('\n3️⃣ 测试 extractStorageKeyFromLegacyValue:')
  const extractedKey = extractStorageKeyFromLegacyValue(storedKey)
  console.log(` 输入: ${storedKey}`)
  console.log(` 输出: ${extractedKey}`)
  if (extractedKey) {
    console.log(` ✅ 成功提取 storageKey`)
  } else {
    console.log(` ❌ 未能提取 storageKey - 这是问题所在!`)
  }
  // 4. resolveMediaRefFromLegacyValue should create/fetch a MediaObject.
  console.log('\n4️⃣ 测试 resolveMediaRefFromLegacyValue:')
  try {
    const mediaRef = await resolveMediaRefFromLegacyValue(storedKey)
    if (mediaRef) {
      console.log(` ✅ MediaObject 创建/获取成功`)
      console.log(` id: ${mediaRef.id}`)
      console.log(` publicId: ${mediaRef.publicId}`)
      console.log(` url: ${mediaRef.url}`)
      console.log(` storageKey: ${mediaRef.storageKey}`)
    } else {
      console.log(` ❌ MediaRef 为 null`)
    }
  } catch (error) {
    console.log(` ❌ 失败:`, error)
  }
  // 5. attachMediaFieldsToProject — the API-layer transform (full flow).
  console.log('\n5️⃣ 测试 attachMediaFieldsToProjectAPI 层转换):')
  try {
    const mockProject = {
      id: 'proj-test',
      locations: [mockDbLocation]
    }
    const result = await attachMediaFieldsToProject(mockProject)
    const location = result.locations?.[0]
    const image = location?.images?.[0]
    console.log(` 转换后的 imageUrl: ${image?.imageUrl}`)
    if (image?.imageUrl?.startsWith('/m/')) {
      console.log(` ✅ 正确生成了 /m/ 格式的 URL`)
      // Extract the publicId from the /m/ URL.
      const publicId = image.imageUrl.replace('/m/', '').split('?')[0]
      console.log(` publicId: ${publicId}`)
      // Verify the MediaObject actually exists.
      const media = await getMediaObjectByPublicId(publicId)
      if (media) {
        console.log(` ✅ MediaObject 存在,storageKey: ${media.storageKey}`)
      } else {
        console.log(` ❌ MediaObject 不存在!`)
      }
    } else if (image?.imageUrl?.startsWith('http')) {
      console.log(` ⚠️ 返回了完整 HTTP URL: ${image.imageUrl}`)
    } else if (!image?.imageUrl) {
      console.log(` ❌ imageUrl 为空!`)
    } else {
      console.log(` ⚠️ URL 格式: ${image.imageUrl}`)
    }
  } catch (error) {
    console.log(` ❌ 失败:`, error)
  }
  // 6. Fetch the /m/ URL over HTTP (requires a dev server on localhost:3000).
  console.log('\n6️⃣ 测试访问 /m/ URL:')
  try {
    const mockProject = {
      id: 'proj-test',
      locations: [mockDbLocation]
    }
    const result = await attachMediaFieldsToProject(mockProject)
    const imageUrl = result.locations?.[0]?.images?.[0]?.imageUrl
    if (imageUrl?.startsWith('/m/')) {
      const fullUrl = `http://localhost:3000${imageUrl}`
      console.log(` 尝试访问: ${fullUrl}`)
      try {
        // redirect: 'manual' so a 302/307 to the signed URL is observable.
        const response = await fetch(fullUrl, { redirect: 'manual' })
        console.log(` 状态: ${response.status}`)
        if (response.status === 200) {
          console.log(` ✅ /m/ 端点工作正常`)
        } else if (response.status === 307 || response.status === 302) {
          console.log(` ✅ /m/ 端点返回重定向(正常)`)
          console.log(` Location: ${response.headers.get('location')?.substring(0, 80)}...`)
        } else if (response.status === 404) {
          console.log(` ❌ MediaObject 未找到(404`)
        } else {
          console.log(` ⚠️ 状态码: ${response.status}`)
        }
      } catch (error) {
        console.log(` ⚠️ 请求失败(可能服务器未启动):`, error)
      }
    } else {
      console.log(` 跳过测试(URL 格式不正确)`)
    }
  } catch (error) {
    console.log(` 跳过测试:`, error)
  }
  // 7. Clean up the uploaded test object.
  console.log('\n7️⃣ 清理测试数据:')
  try {
    const { deleteObject } = await import('../src/lib/storage')
    await deleteObject(storedKey)
    console.log(` ✅ 删除成功`)
  } catch (error) {
    console.log(` ⚠️ 删除失败:`, error)
  }
  console.log('\n✨ 测试完成!')
}
testFullImageFlow().catch(console.error)
+125
View File
@@ -0,0 +1,125 @@
/**
* 图片 URL 流程测试
* 模拟图片生成后的存储和读取流程
* 运行: npx tsx scripts/test-image-url-flow.ts
*/
import { config } from 'dotenv'
config()
import { uploadObject, getSignedUrl, extractStorageKey, toFetchableUrl } from '../src/lib/storage'
import { keyToSignedUrl, addSignedUrlToLocation } from '../src/lib/storage'
import { encodeImageUrls, decodeImageUrlsFromDb } from '../src/lib/contracts/image-urls-contract'
import { randomUUID } from 'crypto'
/**
 * Exercises the image-URL round trip: upload → encodeImageUrls (DB write
 * shape) → decodeImageUrlsFromDb (DB read) → the signed-URL conversion
 * helpers → extractStorageKey, then cleans up the uploaded object. Prints
 * per-step ✅/❌ diagnostics; a summary at the end explains what to expect.
 */
async function testImageUrlFlow() {
  console.log('🧪 测试图片 URL 全流程...\n')
  // 1. Upload a fake image to storage.
  console.log('1️⃣ 模拟上传图片:')
  const testKey = `images/location-${randomUUID()}.jpg`
  const testImageContent = Buffer.from('fake-image-data')
  let storedKey: string
  try {
    storedKey = await uploadObject(testImageContent, testKey)
    console.log(` ✅ 上传成功,返回 key: ${storedKey}`)
  } catch (error) {
    console.log(` ❌ 上传失败:`, error)
    process.exit(1)
  }
  // 2. Simulate the DB write path (encodeImageUrls).
  console.log('\n2️⃣ 模拟数据库存储(encodeImageUrls:')
  const imageUrlsArray = [storedKey]
  const dbValue = encodeImageUrls(imageUrlsArray)
  console.log(` ✅ 数据库值: ${dbValue}`)
  // 3. Simulate the DB read path (decodeImageUrlsFromDb).
  console.log('\n3️⃣ 模拟数据库读取(decodeImageUrlsFromDb:')
  const decodedKeys = decodeImageUrlsFromDb(dbValue)
  console.log(` ✅ 解析出的 keys: ${JSON.stringify(decodedKeys)}`)
  // 4. keyToSignedUrl — the API-layer conversion returned to the frontend.
  console.log('\n4️⃣ 测试 keyToSignedUrlAPI 层转换):')
  for (const key of decodedKeys) {
    const signedUrl = keyToSignedUrl(key)
    console.log(` Key: ${key}`)
    console.log(` → Signed URL: ${signedUrl}`)
    // Expect the relative /api/storage/sign form, not a direct MinIO link.
    if (signedUrl?.startsWith('/api/storage/sign')) {
      console.log(` ✅ 正确生成了签名 URL 路径`)
    } else if (signedUrl?.startsWith('http')) {
      console.log(` ⚠️ 返回了完整 HTTP URL,可能无法直接访问`)
    } else {
      console.log(` ⚠️ URL 格式: ${signedUrl}`)
    }
  }
  // 5. addSignedUrlToLocation — whole-object conversion.
  console.log('\n5️⃣ 测试 addSignedUrlToLocation(完整对象转换):')
  const mockLocationFromDb = {
    id: 'loc-123',
    name: '测试场景',
    images: [
      {
        id: 'img-1',
        imageUrl: storedKey,
        imageIndex: 0,
      }
    ]
  }
  const locationWithSignedUrls = addSignedUrlToLocation(mockLocationFromDb)
  console.log(` 转换后的 location.images:`)
  for (const img of locationWithSignedUrls.images || []) {
    console.log(` - imageIndex: ${img.imageIndex}`)
    console.log(` - imageUrl: ${img.imageUrl}`)
    if (img.imageUrl?.startsWith('/api/storage/sign')) {
      console.log(` ✅ 正确: 是相对路径签名 URL`)
    } else if (img.imageUrl?.startsWith('http://127.0.0.1:19000')) {
      console.log(` ❌ 错误: 是 MinIO 直链,可能需要签名`)
    } else if (img.imageUrl?.startsWith('http')) {
      console.log(` ⚠️ 是外部 HTTP URL`)
    } else {
      console.log(` ⚠️ 其他格式: ${img.imageUrl}`)
    }
  }
  // 6. Call getSignedUrl directly.
  console.log('\n6️⃣ 测试 getSignedUrl 直接调用:')
  const directSignedUrl = getSignedUrl(storedKey)
  console.log(` Key: ${storedKey}`)
  console.log(` → URL: ${directSignedUrl}`)
  // 7. extractStorageKey should recover the key from every URL shape.
  console.log('\n7️⃣ 测试 extractStorageKey(从各种 URL 提取 key:')
  const testUrls = [
    storedKey,
    `http://127.0.0.1:19000/waoowaoo/${storedKey}`,
    directSignedUrl,
  ]
  for (const url of testUrls) {
    const extracted = extractStorageKey(url)
    console.log(` ${url.substring(0, 60)}...`)
    console.log(` → extracted: ${extracted}`)
  }
  // 8. Clean up the uploaded test object.
  console.log('\n8️⃣ 清理测试数据:')
  try {
    const { deleteObject } = await import('../src/lib/storage')
    await deleteObject(storedKey)
    console.log(` ✅ 删除成功`)
  } catch (error) {
    console.log(` ⚠️ 删除失败(可忽略):`, error)
  }
  console.log('\n✨ 测试完成!')
  console.log('\n📋 总结:')
  console.log(' 如果第4、5步返回的是 /api/storage/sign?key=... 格式 → ✅ 正常')
  console.log(' 如果第4、5步返回的是 http://127.0.0.1:19000/... 格式 → ❌ 需要修复')
}
testImageUrlFlow().catch(console.error)
+119
View File
@@ -0,0 +1,119 @@
/**
* MinIO 存储测试脚本
* 运行: npx tsx scripts/test-minio.ts
*/
import { config } from 'dotenv'
config() // 加载 .env 文件
import { getStorageProvider, uploadObject, getSignedObjectUrl, getObjectBuffer, deleteObject } from '../src/lib/storage'
import { randomUUID } from 'crypto'
/**
 * MinIO storage smoke test: env-var check, provider init, upload, signed-URL
 * generation, download-and-compare, HTTP fetch of the signed URL, and
 * cleanup. Exits non-zero as soon as a mandatory step fails.
 */
async function testMinio() {
  console.log('🧪 开始测试 MinIO 存储...\n')
  // 1. Report which required env vars are set (secrets are masked).
  console.log('1️⃣ 检查环境变量:')
  const requiredEnv = [
    'STORAGE_TYPE',
    'MINIO_ENDPOINT',
    'MINIO_ACCESS_KEY',
    'MINIO_SECRET_KEY',
    'MINIO_BUCKET',
  ]
  for (const key of requiredEnv) {
    const value = process.env[key]
    if (value) {
      // Mask sensitive values before printing.
      // NOTE(review): `a || b && c` — && binds tighter than ||, so this is
      // `a || (b && c)`. It happens to work here ('STORAGE_TYPE' contains
      // neither SECRET nor KEY), but the grouping looks unintended; confirm.
      const displayValue = key.includes('SECRET') || key.includes('KEY') && key !== 'STORAGE_TYPE'
        ? '*'.repeat(Math.min(value.length, 8))
        : value
      console.log(` ✅ ${key}=${displayValue}`)
    } else {
      console.log(` ❌ ${key}=未设置`)
    }
  }
  // 2. Initialize the storage provider.
  console.log('\n2️⃣ 初始化存储 Provider:')
  try {
    const provider = getStorageProvider()
    console.log(` ✅ Provider 类型: ${provider.kind}`)
  } catch (error) {
    console.log(` ❌ 初始化失败:`, error)
    process.exit(1)
  }
  // 3. Upload a small text object.
  console.log('\n3️⃣ 测试上传:')
  const testKey = `test/${randomUUID()}.txt`
  const testContent = `Hello MinIO! 测试时间: ${new Date().toISOString()}`
  let uploadedKey: string
  try {
    uploadedKey = await uploadObject(Buffer.from(testContent), testKey)
    console.log(` ✅ 上传成功: ${uploadedKey}`)
  } catch (error) {
    console.log(` ❌ 上传失败:`, error)
    process.exit(1)
  }
  // 4. Generate a short-lived (300 s) signed URL.
  console.log('\n4️⃣ 测试获取签名 URL:')
  let signedUrl: string
  try {
    signedUrl = await getSignedObjectUrl(uploadedKey, 300)
    console.log(` ✅ 签名 URL 生成成功`)
    console.log(` URL: ${signedUrl.substring(0, 100)}...`)
  } catch (error) {
    console.log(` ❌ 签名 URL 生成失败:`, error)
    process.exit(1)
  }
  // 5. Download and compare contents byte-for-byte.
  console.log('\n5️⃣ 测试下载:')
  try {
    const buffer = await getObjectBuffer(uploadedKey)
    const content = buffer.toString()
    if (content === testContent) {
      console.log(` ✅ 下载成功,内容匹配`)
    } else {
      console.log(` ❌ 下载成功,但内容不匹配`)
      console.log(` 预期: ${testContent}`)
      console.log(` 实际: ${content}`)
    }
  } catch (error) {
    console.log(` ❌ 下载失败:`, error)
    process.exit(1)
  }
  // 6. Fetch the signed URL over plain HTTP (non-fatal on failure).
  console.log('\n6️⃣ 测试通过 HTTP 访问签名 URL:')
  try {
    const response = await fetch(signedUrl)
    if (response.ok) {
      const content = await response.text()
      if (content === testContent) {
        console.log(` ✅ HTTP 访问成功,内容匹配`)
      } else {
        console.log(` ❌ HTTP 访问成功,但内容不匹配`)
      }
    } else {
      console.log(` ❌ HTTP 访问失败: ${response.status} ${response.statusText}`)
    }
  } catch (error) {
    console.log(` ❌ HTTP 请求失败:`, error)
  }
  // 7. Delete the test object.
  console.log('\n7️⃣ 清理测试文件:')
  try {
    await deleteObject(uploadedKey)
    console.log(` ✅ 删除成功`)
  } catch (error) {
    console.log(` ❌ 删除失败:`, error)
  }
  console.log('\n✨ 测试完成!')
}
testMinio().catch(console.error)
+41
View File
@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# regression-runner: run the given test command; on failure, list each failed
# test file together with its most recent and original git commits.
set -euo pipefail

if [ "$#" -eq 0 ]; then
  echo "[regression-runner] missing command"
  exit 2
fi

LOG_FILE="$(mktemp -t regression-runner.XXXXXX.log)"
# Fix: remove the temp log on ANY exit (error, signal, early return), not only
# on the single happy path at the end of the script as before.
trap 'rm -f "$LOG_FILE"' EXIT

# Temporarily drop -e so a failing test command doesn't abort the runner;
# tee a copy of combined stdout/stderr for post-mortem grepping.
set +e
"$@" 2>&1 | tee "$LOG_FILE"
CMD_STATUS=${PIPESTATUS[0]}
set -e

if [ "$CMD_STATUS" -ne 0 ]; then
  echo
  echo "[regression-runner] regression failed, collecting diagnostics..."
  # Jest/Vitest-style output prints lines such as " FAIL path/to/test.ts".
  FAILED_FILES="$(grep -E '^ FAIL ' "$LOG_FILE" | sed -E 's/^ FAIL ([^ ]+).*/\1/' | sort -u || true)"
  if [ -z "$FAILED_FILES" ]; then
    echo "[regression-runner] no explicit FAIL file lines found in output"
  else
    echo "[regression-runner] failed files:"
    while IFS= read -r file; do
      [ -z "$file" ] && continue
      echo " - $file"
      # Latest commit touching the file, and the commit that first added it.
      LAST_COMMIT="$(git log -n 1 --format='%h %ad %an %s' --date=short -- "$file" || true)"
      FIRST_COMMIT="$(git log --diff-filter=A --follow --format='%h %ad %an %s' --date=short -- "$file" | tail -n 1 || true)"
      if [ -n "$LAST_COMMIT" ]; then
        echo " latest: $LAST_COMMIT"
      fi
      if [ -n "$FIRST_COMMIT" ]; then
        echo " first: $FIRST_COMMIT"
      fi
    done <<< "$FAILED_FILES"
  fi
fi

exit "$CMD_STATUS"
+95
View File
@@ -0,0 +1,95 @@
/**
* 测试 /api/storage/sign 端点
* 运行: npx tsx scripts/test-sign-api.ts
*/
import { config } from 'dotenv'
config()
import { uploadObject, getSignedObjectUrl } from '../src/lib/storage'
import { randomUUID } from 'crypto'
import http from 'http'
/**
 * Exercises the /api/storage/sign endpoint end to end: uploads a fixture,
 * verifies direct signed-URL access, then checks redirect behaviour of both
 * the new and the legacy signing endpoints against a local dev server.
 * Fix: the fixture is now deleted even when an intermediate step throws
 * (previously any exception between upload and step 7 leaked the object).
 */
async function testSignApi() {
  console.log('🧪 测试 /api/storage/sign API...\n')
  // 1. Upload a small fixture object.
  console.log('1️⃣ 上传测试文件:')
  const testKey = `images/test-${randomUUID()}.txt`
  const testContent = 'Hello from MinIO test!'
  await uploadObject(Buffer.from(testContent), testKey)
  console.log(` ✅ 上传成功: ${testKey}`)
  try {
    // 2. Generate a signed URL directly on the server side.
    console.log('\n2️⃣ 服务端生成签名 URL:')
    const signedUrl = await getSignedObjectUrl(testKey, 300)
    console.log(` URL: ${signedUrl}`)
    // 3. The signed URL should be fetchable without any auth.
    console.log('\n3️⃣ 测试直接访问签名 URL:')
    try {
      const response = await fetch(signedUrl)
      if (response.ok) {
        const content = await response.text()
        console.log(` ✅ 访问成功,内容: "${content}"`)
      } else {
        console.log(` ❌ 访问失败: ${response.status} ${response.statusText}`)
      }
    } catch (error) {
      console.log(` ❌ 请求失败:`, error)
    }
    // 4. Hit the sign endpoint as a browser would; expect a redirect.
    console.log('\n4️⃣ 测试 /api/storage/sign 端点(模拟前端):')
    const signApiUrl = `http://localhost:3000/api/storage/sign?key=${encodeURIComponent(testKey)}&expires=300`
    console.log(` URL: ${signApiUrl}`)
    try {
      const response = await fetch(signApiUrl, { redirect: 'manual' })
      console.log(` 状态: ${response.status}`)
      console.log(` Location: ${response.headers.get('location')}`)
      if (response.status === 307 || response.status === 302) {
        const redirectUrl = response.headers.get('location')
        console.log(` ✅ 重定向 URL: ${redirectUrl?.substring(0, 80)}...`)
        // 5. Follow the redirect and verify the final content.
        console.log('\n5️⃣ 测试跟随重定向访问图片:')
        const finalResponse = await fetch(signApiUrl, { redirect: 'follow' })
        if (finalResponse.ok) {
          const content = await finalResponse.text()
          console.log(` ✅ 最终访问成功,内容: "${content}"`)
        } else {
          console.log(` ❌ 最终访问失败: ${finalResponse.status}`)
        }
      } else {
        const body = await response.text()
        console.log(` 响应: ${body.substring(0, 200)}`)
      }
    } catch (error) {
      console.log(` ❌ 请求失败(可能服务器未启动):`, error)
    }
    // 6. Legacy endpoint: only check status + Location header.
    console.log('\n6️⃣ 测试 /api/cos/image 端点(旧版兼容):')
    const cosApiUrl = `http://localhost:3000/api/cos/image?key=${encodeURIComponent(testKey)}&expires=300`
    console.log(` URL: ${cosApiUrl}`)
    try {
      const response = await fetch(cosApiUrl, { redirect: 'manual' })
      console.log(` 状态: ${response.status}`)
      console.log(` Location: ${response.headers.get('location')}`)
    } catch (error) {
      console.log(` ❌ 请求失败(可能服务器未启动):`, error)
    }
  } finally {
    // 7. Always clean the fixture up, even if a step above threw.
    console.log('\n7️⃣ 清理测试文件:')
    const { deleteObject } = await import('../src/lib/storage')
    await deleteObject(testKey)
    console.log(` ✅ 清理完成`)
  }
  console.log('\n✨ 测试完成!')
}
testSignApi().catch(console.error)
+41
View File
@@ -0,0 +1,41 @@
import { PrismaClient } from '@prisma/client';

// Clears stale model references (pointing at a removed provider) from every
// novel-promotion project owned by the hard-coded user.
const db = new PrismaClient();

// Safety net: abort the run if it hangs (e.g. the database is unreachable).
setTimeout(() => { console.error('TIMEOUT'); process.exit(1); }, 30000);

const userId = '3d84c341-87d7-4165-971d-a3f6c576aa21';
const needle = 'gemini-compatible:5b127c32-136e-4e5a-af74-8bae3e28be7a';
const modelFields = ['characterModel', 'locationModel', 'storyboardModel', 'editModel'];

// novelPromotionData is a relation, query directly
const npProjects = await db.novelPromotionProject.findMany({
  where: { project: { userId } },
  select: { id: true, projectId: true, characterModel: true, locationModel: true, storyboardModel: true, editModel: true, project: { select: { name: true } } }
});

let totalCleaned = 0;
for (const np of npProjects) {
  // Collect the model fields that still reference the removed provider.
  const matched = modelFields.filter((field) => typeof np[field] === 'string' && np[field].includes(needle));
  if (matched.length === 0) continue;
  const cleanedFields = matched.map((field) => `${field}: ${np[field]}`);
  // Blank every matched field in a single update.
  const updates = Object.fromEntries(matched.map((field) => [field, '']));
  await db.novelPromotionProject.update({
    where: { id: np.id },
    data: updates
  });
  console.log(`${np.project.name} (${np.projectId}): cleared ${cleanedFields.length} fields`);
  cleanedFields.forEach(f => console.log(` - ${f}`));
  totalCleaned++;
}
console.log(`\nDone. Cleaned ${totalCleaned} projects.`);
await db.$disconnect();
process.exit(0);
+43
View File
@@ -0,0 +1,43 @@
import { PrismaClient } from '@prisma/client';
// Audits where a (partial) model id still appears for the hard-coded user:
// user preference defaults and per-project novelPromotionData JSON.
const p = new PrismaClient();
// Safety net: abort the run if it hangs (e.g. the database is unreachable).
setTimeout(() => { console.error('TIMEOUT'); process.exit(1); }, 15000);
const userId = '3d84c341-87d7-4165-971d-a3f6c576aa21';
const needle = 'gemini-compatible:5b';
// 1. Check userPreference default models
const pref = await p.userPreference.findUnique({
  where: { userId },
  select: { analysisModel: true, characterModel: true, locationModel: true, storyboardModel: true, editModel: true, videoModel: true }
});
console.log('=== UserPreference defaults ===');
let found = false;
for (const [k, v] of Object.entries(pref || {})) {
  if (typeof v === 'string' && v.includes(needle)) {
    console.log(' FOUND in', k, ':', v);
    found = true;
  }
}
if (!found) console.log(' (clean)');
// 2. Check novelPromotionData JSON for any reference
const projects = await p.project.findMany({
  where: { userId },
  select: { id: true, name: true, novelPromotionData: true }
});
console.log('\n=== Project novelPromotionData ===');
for (const proj of projects) {
  const data = JSON.stringify(proj.novelPromotionData || {});
  if (data.includes(needle)) {
    // Find which keys reference it
    const parsed = proj.novelPromotionData;
    let reported = false;
    for (const [k, v] of Object.entries(parsed || {})) {
      if (typeof v === 'string' && v.includes(needle)) {
        console.log(' FOUND in project', proj.id, '(' + proj.name + ') field:', k, '=', v);
        reported = true;
      }
    }
    // Bug fix: the stringify probe above matches the needle ANYWHERE in the
    // JSON, but the entries scan only covers top-level string values — so a
    // nested match used to be detected yet silently unreported.
    if (!reported) {
      console.log(' FOUND in project', proj.id, '(' + proj.name + ') in a nested value (not a top-level string field)');
    }
  }
}
await p.$disconnect();
process.exit(0);
+225
View File
@@ -0,0 +1,225 @@
import { createScopedLogger } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { addTaskJob } from '@/lib/task/queues'
import { resolveTaskLocaleFromBody } from '@/lib/task/resolve-locale'
import { markTaskFailed } from '@/lib/task/service'
import { publishTaskEvent } from '@/lib/task/publisher'
import { TASK_EVENT_TYPE, TASK_TYPE, type TaskType } from '@/lib/task/types'
import { cleanupAllProjectLogs } from '@/lib/logging/file-writer'
// Tick interval; falls back to 30s when the env var is unset or unparsable
// (the trailing `|| 30000` also catches a parsed 0/NaN).
const INTERVAL_MS = Number.parseInt(process.env.WATCHDOG_INTERVAL_MS || '30000', 10) || 30000
// A 'processing' task whose heartbeat is older than this is treated as dead.
const HEARTBEAT_TIMEOUT_MS = Number.parseInt(process.env.TASK_HEARTBEAT_TIMEOUT_MS || '90000', 10) || 90000
// All known task type values, used to narrow raw DB strings to TaskType.
const TASK_TYPE_SET: ReadonlySet<string> = new Set(Object.values(TASK_TYPE))
// Run log cleanup once per hour, expressed as a number of watchdog ticks.
const LOG_CLEANUP_INTERVAL_TICKS = Math.ceil(3600_000 / INTERVAL_MS)
// Monotonically increasing tick counter; drives the periodic log cleanup.
let tickCount = 0
const logger = createScopedLogger({
  module: 'watchdog',
  action: 'watchdog.tick',
})
/** Narrow a raw DB string to a known TaskType; null when unrecognised. */
function toTaskType(value: string): TaskType | null {
  return TASK_TYPE_SET.has(value) ? (value as TaskType) : null
}
/** Accept only a plain object as a task payload; arrays and primitives map to null. */
function toTaskPayload(value: unknown): Record<string, unknown> | null {
  const isPlainObject = typeof value === 'object' && value !== null && !Array.isArray(value)
  return isPlainObject ? (value as Record<string, unknown>) : null
}
/**
 * Re-enqueues tasks that are 'queued' in the DB but were never handed to the
 * job queue (enqueuedAt is null) — e.g. the producer crashed between the DB
 * write and the queue push. Processes at most 100 rows per tick, oldest first.
 */
async function recoverQueuedTasks() {
  const rows = await prisma.task.findMany({
    where: {
      status: 'queued',
      enqueuedAt: null,
    },
    take: 100, // bound per-tick work; remaining rows are picked up next tick
    orderBy: { createdAt: 'asc' },
  })
  for (const task of rows) {
    const taskType = toTaskType(task.type)
    if (!taskType) {
      // Unknown type string in the DB: log and skip. The row stays queued
      // (and un-enqueued), so it will be reported again on every tick.
      logger.error({
        action: 'watchdog.reenqueue_invalid_type',
        message: `invalid task type: ${task.type}`,
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        errorCode: 'INVALID_PARAMS',
        retryable: false,
      })
      continue
    }
    try {
      const locale = resolveTaskLocaleFromBody(task.payload)
      if (!locale) {
        // Workers require a locale; without one the task can never run,
        // so fail it permanently rather than retrying forever.
        await markTaskFailed(task.id, 'TASK_LOCALE_REQUIRED', 'task locale is missing')
        logger.error({
          action: 'watchdog.reenqueue_locale_missing',
          message: 'task locale is missing',
          taskId: task.id,
          projectId: task.projectId,
          userId: task.userId,
          errorCode: 'TASK_LOCALE_REQUIRED',
          retryable: false,
        })
        continue
      }
      await addTaskJob({
        taskId: task.id,
        type: taskType,
        locale,
        projectId: task.projectId,
        episodeId: task.episodeId,
        targetType: task.targetType,
        targetId: task.targetId,
        payload: toTaskPayload(task.payload),
        userId: task.userId,
      })
      // Only mark the row as enqueued AFTER the queue push succeeded, so a
      // failed push leaves it eligible for the next recovery pass.
      await prisma.task.update({
        where: { id: task.id },
        data: {
          enqueuedAt: new Date(),
          enqueueAttempts: { increment: 1 },
          lastEnqueueError: null,
        },
      })
      logger.info({
        action: 'watchdog.reenqueue',
        message: 'watchdog re-enqueued queued task',
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        details: {
          type: task.type,
          targetType: task.targetType,
          targetId: task.targetId,
        },
      })
    } catch (error: unknown) {
      const message = error instanceof Error ? error.message : 're-enqueue failed'
      // Record the failed attempt; enqueuedAt stays null so the next tick
      // retries this task.
      await prisma.task.update({
        where: { id: task.id },
        data: {
          enqueueAttempts: { increment: 1 },
          lastEnqueueError: message,
        },
      })
      logger.error({
        action: 'watchdog.reenqueue_failed',
        message,
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        errorCode: 'EXTERNAL_ERROR',
        retryable: true,
      })
    }
  }
}
/**
 * Handles 'processing' tasks whose heartbeat has gone stale: tasks that have
 * exhausted their attempts are failed and a FAILED event is published;
 * otherwise the task is reset to 'queued' so recoverQueuedTasks re-enqueues
 * it on a later tick. At most 100 rows per tick.
 */
async function cleanupZombieProcessingTasks() {
  const timeoutAt = new Date(Date.now() - HEARTBEAT_TIMEOUT_MS)
  const rows = await prisma.task.findMany({
    where: {
      status: 'processing',
      heartbeatAt: { lt: timeoutAt },
    },
    take: 100,
  })
  for (const task of rows) {
    // NOTE(review): `||` maps a stored maxAttempts of 0 to the default 5;
    // if 0 is meant to mean "no retries", this should be `??` — confirm
    // against how maxAttempts is written elsewhere.
    if ((task.attempt || 0) >= (task.maxAttempts || 5)) {
      // Out of attempts: fail permanently and notify listeners.
      await markTaskFailed(task.id, 'WATCHDOG_TIMEOUT', 'Task heartbeat timeout')
      await publishTaskEvent({
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        type: TASK_EVENT_TYPE.FAILED,
        payload: { reason: 'watchdog_timeout' },
      })
      logger.error({
        action: 'watchdog.fail_timeout',
        message: 'watchdog marked task as failed due to heartbeat timeout',
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        errorCode: 'WATCHDOG_TIMEOUT',
        retryable: true,
      })
      continue
    }
    // Attempts remain: reset run-state fields so the task looks freshly
    // queued (enqueuedAt null makes recoverQueuedTasks pick it up).
    await prisma.task.update({
      where: { id: task.id },
      data: {
        status: 'queued',
        enqueuedAt: null,
        heartbeatAt: null,
        startedAt: null,
      },
    })
    await publishTaskEvent({
      taskId: task.id,
      projectId: task.projectId,
      userId: task.userId,
      type: TASK_EVENT_TYPE.CREATED,
      payload: { reason: 'watchdog_requeue' },
    })
    logger.warn({
      action: 'watchdog.requeue_processing',
      message: 'watchdog re-queued stalled processing task',
      taskId: task.id,
      projectId: task.projectId,
      userId: task.userId,
      retryable: true,
    })
  }
}
/**
 * One watchdog pass: recover never-enqueued tasks, then deal with stalled
 * 'processing' tasks, and roughly once per hour kick off log cleanup.
 * All failures are caught and logged; this function never rejects.
 */
async function tick() {
  tickCount += 1
  const tickStart = Date.now()
  try {
    await recoverQueuedTasks()
    await cleanupZombieProcessingTasks()
    // Fire-and-forget the hourly log pruning (drops content older than 24h);
    // it must not delay the tick itself.
    if (tickCount % LOG_CLEANUP_INTERVAL_TICKS === 0) {
      void cleanupAllProjectLogs()
    }
    logger.info({
      action: 'watchdog.tick.ok',
      message: 'watchdog tick completed',
      durationMs: Date.now() - tickStart,
    })
  } catch (error: unknown) {
    logger.error({
      action: 'watchdog.tick.failed',
      message: error instanceof Error ? error.message : 'watchdog tick failed',
      durationMs: Date.now() - tickStart,
      errorCode: 'INTERNAL_ERROR',
      retryable: true,
    })
  }
}
// Announce startup with the effective tuning knobs for observability.
logger.info({
  action: 'watchdog.started',
  message: 'watchdog started',
  details: {
    intervalMs: INTERVAL_MS,
    heartbeatTimeoutMs: HEARTBEAT_TIMEOUT_MS,
  },
})
// Run one pass immediately, then repeat forever on a fixed interval.
// `void` marks the promises as intentionally fire-and-forget; tick() handles
// its own errors internally.
void tick()
setInterval(() => {
  void tick()
}, INTERVAL_MS)