release: opensource snapshot 2026-02-27 19:25:00

This commit is contained in:
saturn
2026-02-27 19:25:00 +08:00
commit 5de9622c8b
1055 changed files with 164772 additions and 0 deletions

View File

@@ -0,0 +1,140 @@
import { prisma } from '@/lib/prisma'
import { toMoneyNumber } from '@/lib/billing/money'
// Summary counters produced by one cleanup run.
type CleanupStats = {
  scanned: number
  stale: number
  rolledBack: number
  skipped: number
  errors: number
}

/** True when the operator passed `--apply`; otherwise the run is a dry run. */
function hasApplyFlag(): boolean {
  return process.argv.some((token) => token === '--apply')
}

/**
 * Read the `--hours=N` CLI option. Falls back to `defaultHours` when the
 * option is absent, non-numeric, or not strictly positive; fractional values
 * are floored.
 */
function parseHoursArg(defaultHours: number): number {
  const prefix = '--hours='
  const raw = process.argv.find((token) => token.startsWith(prefix))
  if (!raw) return defaultHours
  const parsed = Number(raw.substring(prefix.length))
  return Number.isFinite(parsed) && parsed > 0 ? Math.floor(parsed) : defaultHours
}

/** Pretty-print a JSON payload to stdout followed by a newline. */
function writeJson(payload: unknown) {
  const text = JSON.stringify(payload, null, 2)
  process.stdout.write(`${text}\n`)
}

/** Emit a payload to stderr; strings pass through, anything else is JSON-encoded. */
function writeError(payload: unknown) {
  const text = typeof payload === 'string' ? payload : JSON.stringify(payload, null, 2)
  process.stderr.write(`${text}\n`)
}
/**
 * Roll back balance freezes stuck in 'pending' past the cutoff window.
 *
 * Without --apply this is a dry run that only reports candidates. With
 * --apply, each stale freeze is processed in its own transaction: the frozen
 * amount is returned to the user's available balance and the freeze row is
 * marked 'rolled_back'.
 *
 * Bug fix: the transaction callback previously incremented `stats.skipped`
 * and returned, after which the caller unconditionally incremented
 * `stats.rolledBack` — so every skipped freeze was double-counted as both
 * skipped AND rolled back. The callback now returns an outcome and exactly
 * one counter is bumped per freeze.
 */
async function main() {
  const apply = hasApplyFlag()
  const hours = parseHoursArg(24) // default window: 24 hours
  const cutoff = new Date(Date.now() - hours * 60 * 60 * 1000)
  // Candidates: still pending and created before the cutoff.
  const pending = await prisma.balanceFreeze.findMany({
    where: {
      status: 'pending',
      createdAt: { lt: cutoff },
    },
    orderBy: { createdAt: 'asc' },
  })
  const stats: CleanupStats = {
    scanned: pending.length,
    stale: pending.length,
    rolledBack: 0,
    skipped: 0,
    errors: 0,
  }
  if (!apply) {
    // Dry run: report what WOULD be rolled back, then exit.
    writeJson({
      mode: 'dry-run',
      hours,
      cutoff: cutoff.toISOString(),
      stalePendingCount: pending.length,
      stalePending: pending.map((f) => ({
        id: f.id,
        userId: f.userId,
        amount: toMoneyNumber(f.amount),
        createdAt: f.createdAt.toISOString(),
      })),
    })
    return
  }
  for (const freeze of pending) {
    try {
      // One transaction per freeze so a single failure cannot abort the batch.
      const outcome = await prisma.$transaction(async (tx) => {
        // Re-read inside the transaction: the freeze may have been settled
        // or rolled back since the initial scan.
        const current = await tx.balanceFreeze.findUnique({
          where: { id: freeze.id },
        })
        if (!current || current.status !== 'pending') {
          return 'skipped' as const
        }
        const balance = await tx.userBalance.findUnique({
          where: { userId: current.userId },
        })
        if (!balance) {
          return 'skipped' as const
        }
        const frozenAmount = toMoneyNumber(balance.frozenAmount)
        const freezeAmount = toMoneyNumber(current.amount)
        // Clamp so the stored frozenAmount never goes negative; the user is
        // credited exactly the amount actually released (frozenDelta).
        const nextFrozenAmount = Math.max(0, frozenAmount - freezeAmount)
        const frozenDelta = frozenAmount - nextFrozenAmount
        await tx.userBalance.update({
          where: { userId: current.userId },
          data: {
            balance: { increment: frozenDelta },
            frozenAmount: { decrement: frozenDelta },
          },
        })
        await tx.balanceFreeze.update({
          where: { id: current.id },
          data: {
            status: 'rolled_back',
          },
        })
        return 'rolled_back' as const
      })
      if (outcome === 'rolled_back') {
        stats.rolledBack += 1
      } else {
        stats.skipped += 1
      }
    } catch (error) {
      stats.errors += 1
      writeError({
        tag: 'billing-cleanup-pending-freezes.rollback_failed',
        freezeId: freeze.id,
        userId: freeze.userId,
        amount: toMoneyNumber(freeze.amount),
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }
  writeJson({
    mode: 'apply',
    hours,
    cutoff: cutoff.toISOString(),
    stats,
  })
}
// Entry point: log fatal errors to stderr with exit code 1, and always
// release the prisma connection so the process can exit cleanly.
main()
  .catch((error) => {
    writeError({
      tag: 'billing-cleanup-pending-freezes.fatal',
      error: error instanceof Error ? error.message : String(error),
    })
    process.exit(1)
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,125 @@
import { prisma } from '@/lib/prisma'
import { roundMoney, toMoneyNumber } from '@/lib/billing/money'
// One reconciliation row per user: on-hand balances vs. the transaction ledger.
type UserLedgerRow = {
  userId: string
  balance: number
  frozenAmount: number
  txNetAmount: number
  ledgerAmount: number
  diff: number
}

/** True when the operator passed `--strict` (non-zero exit on any mismatch). */
function hasStrictFlag(): boolean {
  return process.argv.some((token) => token === '--strict')
}

/** Pretty-print a JSON payload to stdout followed by a newline. */
function write(payload: unknown) {
  const text = JSON.stringify(payload, null, 2)
  process.stdout.write(`${text}\n`)
}
/**
 * Reconcile user balances against the transaction ledger and surface pending
 * freezes whose backing task is missing or no longer active. With --strict,
 * any discrepancy sets exit code 1 (suitable for CI/cron alerting).
 */
async function main() {
  const strict = hasStrictFlag()
  // Fetch the three independent datasets in parallel.
  const [balances, txByUser, pendingFreezes] = await Promise.all([
    prisma.userBalance.findMany({
      select: {
        userId: true,
        balance: true,
        frozenAmount: true,
      },
    }),
    // Net transaction amount per user (sum over all their transactions).
    prisma.balanceTransaction.groupBy({
      by: ['userId'],
      _sum: { amount: true },
    }),
    prisma.balanceFreeze.findMany({
      where: { status: 'pending' },
      select: {
        id: true,
        userId: true,
        taskId: true,
        amount: true,
        createdAt: true,
      },
      orderBy: { createdAt: 'asc' },
    }),
  ])
  const txNetByUser = new Map<string, number>()
  for (const row of txByUser) {
    // 8-decimal rounding is used consistently below to keep float math stable.
    txNetByUser.set(row.userId, roundMoney(toMoneyNumber(row._sum.amount), 8))
  }
  // Invariant per user: balance + frozenAmount (ledgerAmount) should equal
  // the net transaction amount; diff captures the discrepancy.
  const ledgerRows: UserLedgerRow[] = balances.map((row) => {
    const balance = toMoneyNumber(row.balance)
    const frozenAmount = toMoneyNumber(row.frozenAmount)
    const txNetAmount = roundMoney(txNetByUser.get(row.userId) || 0, 8)
    const ledgerAmount = roundMoney(balance + frozenAmount, 8)
    return {
      userId: row.userId,
      balance,
      frozenAmount,
      txNetAmount,
      ledgerAmount,
      diff: roundMoney(ledgerAmount - txNetAmount, 8),
    }
  })
  // Tolerance matches the 8-decimal rounding above.
  const nonZeroDiffUsers = ledgerRows.filter((row) => Math.abs(row.diff) > 1e-8)
  const pendingTaskIds = pendingFreezes
    .map((row) => row.taskId)
    .filter((taskId): taskId is string => typeof taskId === 'string' && taskId.length > 0)
  const tasks = pendingTaskIds.length > 0
    ? await prisma.task.findMany({
        where: { id: { in: pendingTaskIds } },
        select: { id: true, status: true },
      })
    : []
  const taskStatusById = new Map(tasks.map((row) => [row.id, row.status]))
  const activeStatuses = new Set(['queued', 'processing'])
  // Orphans: freezes with no task, a deleted task, or a task that already
  // finished — their held funds should have been settled or rolled back.
  const orphanPendingFreezes = pendingFreezes.filter((freeze) => {
    if (!freeze.taskId) return true
    const status = taskStatusById.get(freeze.taskId)
    if (!status) return true
    return !activeStatuses.has(status)
  })
  const result = {
    strict,
    checkedAt: new Date().toISOString(),
    totals: {
      users: balances.length,
      txUsers: txByUser.length,
      pendingFreezes: pendingFreezes.length,
      nonZeroDiffUsers: nonZeroDiffUsers.length,
      orphanPendingFreezes: orphanPendingFreezes.length,
    },
    nonZeroDiffUsers,
    orphanPendingFreezes: orphanPendingFreezes.map((row) => ({
      id: row.id,
      userId: row.userId,
      taskId: row.taskId,
      amount: toMoneyNumber(row.amount),
      createdAt: row.createdAt.toISOString(),
    })),
  }
  write(result)
  // Report always prints; only --strict escalates findings to a failure exit.
  if (strict && (nonZeroDiffUsers.length > 0 || orphanPendingFreezes.length > 0)) {
    process.exitCode = 1
  }
}
// Entry point: report fatal errors as JSON on stdout, set a failure exit
// code, and always release the prisma connection.
main()
  .catch((error) => {
    write({
      error: error instanceof Error ? error.message : String(error),
    })
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

105
scripts/bull-board.ts Normal file
View File

@@ -0,0 +1,105 @@
import { createScopedLogger } from '@/lib/logging/core'
import express, { type NextFunction, type Request, type Response } from 'express'
import { createBullBoard } from '@bull-board/api'
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter'
import { ExpressAdapter } from '@bull-board/express'
import { imageQueue, textQueue, videoQueue, voiceQueue } from '@/lib/task/queues'
// Bind address / port / mount path for the board; all overridable via env.
const host = process.env.BULL_BOARD_HOST || '127.0.0.1'
// Falls back to 3010 when the env var is missing or not a parseable integer.
const port = Number.parseInt(process.env.BULL_BOARD_PORT || '3010', 10) || 3010
const basePath = process.env.BULL_BOARD_BASE_PATH || '/admin/queues'
// Basic-auth credentials; when BOTH are unset, auth is disabled entirely.
const authUser = process.env.BULL_BOARD_USER
const authPassword = process.env.BULL_BOARD_PASSWORD
const logger = createScopedLogger({
  module: 'ops.bull_board',
})

/** Reply 401 with a Basic-auth challenge so browsers prompt for credentials. */
function unauthorized(res: Response) {
  res.setHeader('WWW-Authenticate', 'Basic realm="BullMQ Board"')
  res.status(401).send('Authentication required')
}
function basicAuthMiddleware(req: Request, res: Response, next: NextFunction) {
if (!authUser && !authPassword) {
next()
return
}
const authorization = req.headers.authorization
if (!authorization?.startsWith('Basic ')) {
unauthorized(res)
return
}
const encoded = authorization.slice(6).trim()
let decoded = ''
try {
decoded = Buffer.from(encoded, 'base64').toString('utf8')
} catch {
unauthorized(res)
return
}
const index = decoded.indexOf(':')
if (index === -1) {
unauthorized(res)
return
}
const username = decoded.slice(0, index)
const password = decoded.slice(index + 1)
if (username !== (authUser || '') || password !== (authPassword || '')) {
unauthorized(res)
return
}
next()
}
// Mount the bull-board UI over an express adapter at basePath.
const serverAdapter = new ExpressAdapter()
serverAdapter.setBasePath(basePath)
createBullBoard({
  queues: [
    new BullMQAdapter(imageQueue),
    new BullMQAdapter(videoQueue),
    new BullMQAdapter(voiceQueue),
    new BullMQAdapter(textQueue),
  ],
  serverAdapter,
})
const app = express()
app.disable('x-powered-by') // don't advertise the framework in responses
// Auth middleware runs before the board router for every request under basePath.
app.use(basePath, basicAuthMiddleware, serverAdapter.getRouter())
const server = app.listen(port, host, () => {
  const secured = authUser || authPassword ? 'enabled' : 'disabled'
  logger.info({
    action: 'bull_board.started',
    message: 'bull board listening',
    details: {
      host,
      port,
      basePath,
      auth: secured,
    },
  })
})

/**
 * Graceful shutdown: close queue connections, then the HTTP server, then
 * exit 0. allSettled ensures one failing queue close does not block the rest.
 */
async function shutdown(signal: string) {
  logger.info({
    action: 'bull_board.shutdown',
    message: 'bull board shutting down',
    details: {
      signal,
    },
  })
  await Promise.allSettled([imageQueue.close(), videoQueue.close(), voiceQueue.close(), textQueue.close()])
  await new Promise<void>((resolve) => server.close(() => resolve()))
  process.exit(0)
}
// `void` marks the shutdown promise as intentionally un-awaited.
process.on('SIGINT', () => void shutdown('SIGINT'))
process.on('SIGTERM', () => void shutdown('SIGTERM'))

View File

@@ -0,0 +1,38 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { execSync } from 'node:child_process'
// Routes intentionally exempt from the apiHandler wrapper requirement
// (framework-managed handlers such as NextAuth, raw file serving, boot probe).
const ALLOWLIST = new Set([
  'src/app/api/auth/[...nextauth]/route.ts',
  'src/app/api/files/[...path]/route.ts',
  'src/app/api/system/boot-id/route.ts',
])
/**
 * Verify every API route module references `apiHandler`.
 *
 * Improvement: the previous implementation spawned one `rg` subprocess per
 * route file (interpolating each file path into a shell command line); we now
 * run a single `rg -l` pass over the whole tree, which is faster and avoids
 * shell-quoting file paths entirely. Exits 1 (listing offenders) when any
 * non-allowlisted route is missing `apiHandler`.
 */
function main() {
  const output = execSync("rg --files src/app/api | rg 'route\\.ts$'", { encoding: 'utf8' })
  const files = output
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean)
  // Single pass: every file under src/app/api that mentions apiHandler.
  // `|| true` keeps execSync from throwing when rg exits 1 (no matches).
  const withHandlerOutput = execSync("rg -l 'apiHandler' src/app/api || true", { encoding: 'utf8' })
  const withHandler = new Set(
    withHandlerOutput
      .split('\n')
      .map((line) => line.trim())
      .filter(Boolean),
  )
  // A route is missing when it is neither allowlisted nor in the match set.
  const missing = files.filter((file) => !ALLOWLIST.has(file) && !withHandler.has(file))
  if (missing.length > 0) {
    _ulogError('[check-api-handler] missing apiHandler in:')
    for (const file of missing) {
      _ulogError(`- ${file}`)
    }
    process.exit(1)
  }
  _ulogInfo(`[check-api-handler] ok total=${files.length} allowlist=${ALLOWLIST.size}`)
}
main()

View File

@@ -0,0 +1,334 @@
import { promises as fs } from 'node:fs'
import path from 'node:path'
// Catalog files live here: one JSON array of model entries per file.
const CATALOG_DIR = path.resolve(process.cwd(), 'standards/capabilities')
// Every namespace a `capabilities` object may legally contain.
const CAPABILITY_NAMESPACES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
// Allowed fields per capabilities namespace; anything else is flagged.
const CAPABILITY_NAMESPACE_ALLOWED_FIELDS = {
  llm: new Set(['reasoningEffortOptions', 'fieldI18n']),
  image: new Set(['resolutionOptions', 'fieldI18n']),
  video: new Set([
    'generationModeOptions',
    'generateAudioOptions',
    'durationOptions',
    'fpsOptions',
    'resolutionOptions',
    'firstlastframe',
    'supportGenerateAudio',
    'fieldI18n',
  ]),
  audio: new Set(['voiceOptions', 'rateOptions', 'fieldI18n']),
  lipsync: new Set(['modeOptions', 'fieldI18n']),
}
// Per namespace: translatable field name -> the options field whose values
// that field's optionLabelKeys must cover.
const CAPABILITY_NAMESPACE_I18N_FIELDS = {
  llm: { reasoningEffort: 'reasoningEffortOptions' },
  image: { resolution: 'resolutionOptions' },
  video: {
    generationMode: 'generationModeOptions',
    generateAudio: 'generateAudioOptions',
    duration: 'durationOptions',
    fps: 'fpsOptions',
    resolution: 'resolutionOptions',
  },
  audio: { voice: 'voiceOptions', rate: 'rateOptions' },
  lipsync: { mode: 'modeOptions' },
}
// Valid modelType values for catalog entries.
const MODEL_TYPES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
// --- generic shape predicates -------------------------------------------

// Plain object: non-null, typeof object, and not an array.
function isRecord(value) {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && Array.isArray(value) === false
}

// Non-blank string.
function isNonEmptyString(value) {
  return typeof value === 'string' && value.trim() !== ''
}

// i18n keys are dotted paths, e.g. "video.duration.label".
function isI18nKey(value) {
  return isNonEmptyString(value) && value.indexOf('.') !== -1
}

// Array whose entries are all non-blank strings.
function isStringArray(value) {
  if (!Array.isArray(value)) return false
  return value.every((item) => isNonEmptyString(item))
}

// Array whose entries are all finite numbers.
function isNumberArray(value) {
  if (!Array.isArray(value)) return false
  for (const item of value) {
    if (typeof item !== 'number' || !Number.isFinite(item)) return false
  }
  return true
}

// Array whose entries are all booleans.
function isBooleanArray(value) {
  if (!Array.isArray(value)) return false
  return value.every((item) => typeof item === 'boolean')
}
// Parse a "provider::modelId" key strictly; returns null unless both halves
// are non-empty after trimming. (Non-empty-string guard inlined.)
function parseModelKeyStrict(value) {
  if (typeof value !== 'string' || value.trim().length === 0) return null
  const trimmed = value.trim()
  const sep = trimmed.indexOf('::')
  if (sep < 0) return null
  const provider = trimmed.slice(0, sep).trim()
  const modelId = trimmed.slice(sep + 2).trim()
  if (provider === '' || modelId === '') return null
  return { provider, modelId, modelKey: `${provider}::${modelId}` }
}
function pushIssue(issues, file, index, field, message) {
issues.push({ file, index, field, message })
}
// Flag any key under capabilities.<namespace> that is not in that
// namespace's allow-list; `i18n` gets a dedicated hint pointing at fieldI18n.
function validateAllowedFields(issues, file, index, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  const allowed = CAPABILITY_NAMESPACE_ALLOWED_FIELDS[namespace]
  for (const key of Object.keys(namespaceValue)) {
    if (allowed.has(key)) continue
    const fieldPath = `capabilities.${namespace}.${key}`
    if (key === 'i18n') {
      pushIssue(issues, file, index, fieldPath, 'use fieldI18n instead of i18n')
    } else {
      pushIssue(issues, file, index, fieldPath, `unknown capability field: ${key}`)
    }
  }
}
// Validate the optional fieldI18n map of a namespace: only known fields,
// each config an object, label/unit keys dotted, and every optionLabelKeys
// entry both a valid i18n key and an option actually declared in the
// corresponding *Options field.
function validateFieldI18nMap(issues, file, index, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  const fieldI18n = namespaceValue.fieldI18n
  if (fieldI18n === undefined) return
  if (!isRecord(fieldI18n)) {
    pushIssue(issues, file, index, `capabilities.${namespace}.fieldI18n`, 'fieldI18n must be an object')
    return
  }
  const i18nFields = CAPABILITY_NAMESPACE_I18N_FIELDS[namespace]
  for (const [fieldName, fieldConfig] of Object.entries(fieldI18n)) {
    const basePath = `capabilities.${namespace}.fieldI18n.${fieldName}`
    if (!(fieldName in i18nFields)) {
      pushIssue(issues, file, index, basePath, `unknown i18n field: ${fieldName}`)
      continue
    }
    if (!isRecord(fieldConfig)) {
      pushIssue(issues, file, index, basePath, 'field i18n config must be an object')
      continue
    }
    if (fieldConfig.labelKey !== undefined && !isI18nKey(fieldConfig.labelKey)) {
      pushIssue(issues, file, index, `${basePath}.labelKey`, 'labelKey must be an i18n key')
    }
    if (fieldConfig.unitKey !== undefined && !isI18nKey(fieldConfig.unitKey)) {
      pushIssue(issues, file, index, `${basePath}.unitKey`, 'unitKey must be an i18n key')
    }
    if (fieldConfig.optionLabelKeys === undefined) continue
    if (!isRecord(fieldConfig.optionLabelKeys)) {
      pushIssue(issues, file, index, `${basePath}.optionLabelKeys`, 'optionLabelKeys must be an object')
      continue
    }
    // Option values are compared as strings (numeric/boolean options are
    // stringified) against the declared options list, when present.
    const optionsFieldName = i18nFields[fieldName]
    const rawOptions = namespaceValue[optionsFieldName]
    const knownOptions = Array.isArray(rawOptions) ? new Set(rawOptions.map((v) => String(v))) : null
    for (const [optionValue, optionLabel] of Object.entries(fieldConfig.optionLabelKeys)) {
      if (!isI18nKey(optionLabel)) {
        pushIssue(
          issues,
          file,
          index,
          `${basePath}.optionLabelKeys.${optionValue}`,
          'option label must be an i18n key',
        )
      }
      if (knownOptions && !knownOptions.has(optionValue)) {
        pushIssue(
          issues,
          file,
          index,
          `${basePath}.optionLabelKeys.${optionValue}`,
          `option ${optionValue} is not defined in ${optionsFieldName}`,
        )
      }
    }
  }
}
// Validate the optional capabilities object for one catalog entry:
// 1) namespaces must be known and must match the entry's modelType;
// 2) each present namespace gets its allow-list, typed field checks, and
//    fieldI18n validation. Table-driven so every namespace follows the same
//    path; check order matches the original per-namespace sequence.
function validateCapabilitiesForModelType(issues, file, index, modelType, capabilities) {
  if (capabilities === undefined || capabilities === null) return
  if (!isRecord(capabilities)) {
    pushIssue(issues, file, index, 'capabilities', 'capabilities must be an object')
    return
  }
  for (const namespace of Object.keys(capabilities)) {
    if (!CAPABILITY_NAMESPACES.has(namespace)) {
      pushIssue(issues, file, index, `capabilities.${namespace}`, `unknown capabilities namespace: ${namespace}`)
      continue
    }
    if (namespace !== modelType) {
      pushIssue(
        issues,
        file,
        index,
        `capabilities.${namespace}`,
        `namespace ${namespace} is not allowed for model type ${modelType}`,
      )
    }
  }
  const isBoolean = (v) => typeof v === 'boolean'
  // [field, predicate, failure message] per namespace, in check order.
  const FIELD_CHECKS = {
    llm: [['reasoningEffortOptions', isStringArray, 'must be string array']],
    image: [['resolutionOptions', isStringArray, 'must be string array']],
    video: [
      ['generationModeOptions', isStringArray, 'must be string array'],
      ['generateAudioOptions', isBooleanArray, 'must be boolean array'],
      ['durationOptions', isNumberArray, 'must be number array'],
      ['fpsOptions', isNumberArray, 'must be number array'],
      ['resolutionOptions', isStringArray, 'must be string array'],
      ['supportGenerateAudio', isBoolean, 'must be boolean'],
      ['firstlastframe', isBoolean, 'must be boolean'],
    ],
    audio: [
      ['voiceOptions', isStringArray, 'must be string array'],
      ['rateOptions', isStringArray, 'must be string array'],
    ],
    lipsync: [['modeOptions', isStringArray, 'must be string array']],
  }
  for (const namespace of ['llm', 'image', 'video', 'audio', 'lipsync']) {
    const value = capabilities[namespace]
    if (value === undefined) continue
    if (!isRecord(value)) {
      pushIssue(issues, file, index, `capabilities.${namespace}`, `${namespace} capabilities must be an object`)
      continue
    }
    validateAllowedFields(issues, file, index, namespace, value)
    for (const [fieldName, predicate, message] of FIELD_CHECKS[namespace]) {
      if (value[fieldName] !== undefined && !predicate(value[fieldName])) {
        pushIssue(issues, file, index, `capabilities.${namespace}.${fieldName}`, message)
      }
    }
    validateFieldI18nMap(issues, file, index, namespace, value)
  }
}
// List absolute paths of every *.json file directly inside the catalog dir.
async function listCatalogFiles() {
  const entries = await fs.readdir(CATALOG_DIR, { withFileTypes: true })
  const jsonFiles = []
  for (const entry of entries) {
    if (entry.isFile() && entry.name.endsWith('.json')) {
      jsonFiles.push(path.join(CATALOG_DIR, entry.name))
    }
  }
  return jsonFiles
}
// Read and parse one catalog file; the top-level JSON value must be an array.
async function readCatalog(filePath) {
  const raw = await fs.readFile(filePath, 'utf8')
  const parsed = JSON.parse(raw)
  if (Array.isArray(parsed)) return parsed
  throw new Error(`catalog must be an array: ${filePath}`)
}
/**
 * Validate every capability catalog file. Issues are collected (not thrown)
 * so a single run reports as many problems as possible; exit code 1 signals
 * failure to CI.
 */
async function main() {
  const issues = []
  const files = await listCatalogFiles()
  if (files.length === 0) {
    // An empty catalog directory is a configuration error, not "all green".
    throw new Error(`no catalog files found in ${CATALOG_DIR}`)
  }
  for (const filePath of files) {
    const catalogItems = await readCatalog(filePath)
    for (let index = 0; index < catalogItems.length; index += 1) {
      const item = catalogItems[index]
      if (!isRecord(item)) {
        pushIssue(issues, filePath, index, 'entry', 'entry must be an object')
        continue
      }
      if (!isNonEmptyString(item.modelType) || !MODEL_TYPES.has(item.modelType)) {
        pushIssue(issues, filePath, index, 'modelType', 'modelType must be llm/image/video/audio/lipsync')
        continue
      }
      if (!isNonEmptyString(item.provider)) {
        pushIssue(issues, filePath, index, 'provider', 'provider must be a non-empty string')
      }
      if (!isNonEmptyString(item.modelId)) {
        pushIssue(issues, filePath, index, 'modelId', 'modelId must be a non-empty string')
      }
      // Recompose and re-parse to catch stray '::' or blank halves.
      const modelKey = `${item.provider || ''}::${item.modelId || ''}`
      if (!parseModelKeyStrict(modelKey)) {
        pushIssue(issues, filePath, index, 'modelKey', 'provider/modelId must compose a valid provider::modelId')
      }
      validateCapabilitiesForModelType(issues, filePath, index, item.modelType, item.capabilities)
    }
  }
  if (issues.length === 0) {
    process.stdout.write(`[check-capability-catalog] OK (${files.length} files)\n`)
    return
  }
  // Cap console output; the exit code still reflects the full issue count.
  const maxPrint = 50
  for (const issue of issues.slice(0, maxPrint)) {
    process.stdout.write(`[check-capability-catalog] ${issue.file}#${issue.index} ${issue.field}: ${issue.message}\n`)
  }
  if (issues.length > maxPrint) {
    process.stdout.write(`[check-capability-catalog] ... ${issues.length - maxPrint} more issues\n`)
  }
  process.exitCode = 1
}
// Entry point: any unexpected error is reported and fails the run.
main().catch((error) => {
  process.stderr.write(`[check-capability-catalog] failed: ${String(error)}\n`)
  process.exitCode = 1
})

View File

@@ -0,0 +1,118 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { decodeImageUrlsFromDb } from '@/lib/contracts/image-urls-contract'
// Shape of the two appearance tables we audit (only the image-URL columns).
type AppearanceRow = {
  id: string
  imageUrls: string | null
  previousImageUrls: string | null
}
// Minimal structural view over a prisma delegate, so model names chosen at
// runtime can be queried without per-model code.
type DynamicModel = {
  findMany: (args: unknown) => Promise<AppearanceRow[]>
}
// Rows fetched per page while cursor-paginating.
const BATCH_SIZE = 500
// Tables to audit: display name + prisma delegate property name.
const MODELS: Array<{ name: string; model: string }> = [
  { name: 'CharacterAppearance', model: 'characterAppearance' },
  { name: 'GlobalCharacterAppearance', model: 'globalCharacterAppearance' },
]
// Untyped view over the prisma client so delegates can be looked up by key.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>

/** Write one line to stdout. */
function print(message: string) {
  process.stdout.write(`${message}\n`)
}
/**
 * Cursor-paginate through one appearance table and decode both URL columns
 * on every row, counting contract violations.
 *
 * @param modelName display name used in log lines and decode context
 * @param modelKey  property name of the prisma delegate to query
 * @returns scanned/violation counters for aggregation by main()
 * @throws when `modelKey` does not resolve to a prisma delegate
 */
async function checkModel(modelName: string, modelKey: string) {
  const model = prismaDynamic[modelKey]
  if (!model) {
    throw new Error(`Prisma model not found: ${modelKey}`)
  }
  let scanned = 0
  let violations = 0
  // Keep at most 20 example violations for the error log.
  const samples: Array<{ id: string; field: 'imageUrls' | 'previousImageUrls'; message: string; value: string | null }> = []
  let cursor: string | null = null
  while (true) {
    // Cursor pagination ordered by id; skip: 1 excludes the cursor row itself.
    const rows = await model.findMany({
      select: {
        id: true,
        imageUrls: true,
        previousImageUrls: true,
      },
      ...(cursor
        ? {
            cursor: { id: cursor },
            skip: 1,
          }
        : {}),
      orderBy: { id: 'asc' },
      take: BATCH_SIZE,
    })
    if (rows.length === 0) break
    for (const row of rows) {
      scanned += 1
      for (const fieldName of ['imageUrls', 'previousImageUrls'] as const) {
        try {
          // Throws when the stored value violates the image-URLs contract.
          decodeImageUrlsFromDb(row[fieldName], `${modelName}.${fieldName}`)
        } catch (error) {
          violations += 1
          if (samples.length < 20) {
            samples.push({
              id: row.id,
              field: fieldName,
              message: error instanceof Error ? error.message : String(error),
              value: row[fieldName],
            })
          }
        }
      }
    }
    cursor = rows[rows.length - 1]?.id || null
  }
  const summary = `[check-image-urls-contract] ${modelName}: scanned=${scanned} violations=${violations}`
  _ulogInfo(summary)
  print(summary)
  if (samples.length > 0) {
    _ulogError(`[check-image-urls-contract] ${modelName}: samples=${JSON.stringify(samples, null, 2)}`)
  }
  return { scanned, violations }
}
/**
 * Audit every configured model and fail (exit code 1) when any row violates
 * the image-URLs contract. Models are checked sequentially to keep DB load low.
 */
async function main() {
  let totalScanned = 0
  let totalViolations = 0
  for (const target of MODELS) {
    const result = await checkModel(target.name, target.model)
    totalScanned += result.scanned
    totalViolations += result.violations
  }
  if (totalViolations > 0) {
    _ulogError(`[check-image-urls-contract] failed scanned=${totalScanned} violations=${totalViolations}`)
    print(`[check-image-urls-contract] failed scanned=${totalScanned} violations=${totalViolations}`)
    process.exitCode = 1
    return
  }
  print(`[check-image-urls-contract] ok scanned=${totalScanned}`)
}
// Entry point: report fatal errors, always release the prisma connection.
main()
  .catch((error) => {
    _ulogError('[check-image-urls-contract] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,110 @@
import fs from 'node:fs'
// A rule pins literal substrings (log action names, field names) that must
// appear in a given source file, so semantic logging contracts cannot
// silently drift. Patterns are plain substrings, NOT regexes.
type Rule = {
  file: string
  patterns: string[]
}
const RULES: Rule[] = [
  {
    file: 'src/lib/api-errors.ts',
    patterns: ['x-request-id', 'api.request.start', 'api.request.finish', 'api.request.error'],
  },
  {
    file: 'src/lib/workers/shared.ts',
    patterns: ['worker.start', 'worker.completed', 'worker.failed', 'durationMs', 'errorCode'],
  },
  {
    file: 'src/app/api/sse/route.ts',
    patterns: ['sse.connect', 'sse.replay', 'sse.disconnect'],
  },
  {
    file: 'scripts/watchdog.ts',
    patterns: ['watchdog.started', 'watchdog.tick.ok', 'watchdog.tick.failed'],
  },
  {
    file: 'scripts/bull-board.ts',
    patterns: ['bull_board.started', 'bull_board.shutdown'],
  },
  {
    file: 'src/lib/task/submitter.ts',
    patterns: ['requestId', 'task.submit.created', 'task.submit.enqueued'],
  },
  {
    file: 'src/lib/task/types.ts',
    patterns: ['trace', 'requestId'],
  },
]
/** Read a file as UTF-8 text, synchronously. */
function read(file: string): string {
  const content = fs.readFileSync(file, { encoding: 'utf8' })
  return content
}
/** Evaluate every static RULES entry; returns human-readable violations. */
function checkRules(): string[] {
  const violations: string[] = []
  for (const rule of RULES) {
    const content = read(rule.file)
    const missingPatterns = rule.patterns.filter((pattern) => !content.includes(pattern))
    for (const pattern of missingPatterns) {
      violations.push(`${rule.file} missing "${pattern}"`)
    }
  }
  return violations
}
/**
 * Every API route that calls submitTask() must import getRequestId and pass
 * `requestId: getRequestId(request)` so request traces survive into the
 * task system.
 */
function checkSubmitTaskRoutes() {
  const routeFiles = walk('src/app/api').filter((file) => file.endsWith('/route.ts'))
  const submitTaskFiles = routeFiles.filter((file) => read(file).includes('submitTask('))
  const violations: string[] = []
  for (const file of submitTaskFiles) {
    const content = read(file)
    if (!content.includes('getRequestId')) {
      violations.push(`${file} uses submitTask but does not import getRequestId`)
    } else if (!content.includes('requestId: getRequestId(request)')) {
      violations.push(`${file} uses submitTask but does not pass requestId`)
    }
  }
  return { submitTaskFiles, violations }
}
/** Recursively collect every file path under `dir` ('/' separators; files only). */
function walk(dir: string): string[] {
  const collected: string[] = []
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const childPath = `${dir}/${entry.name}`
    if (entry.isDirectory()) {
      for (const nested of walk(childPath)) collected.push(nested)
    } else {
      collected.push(childPath)
    }
  }
  return collected
}
/**
 * Run the static rules plus the submitTask/requestId route check; print all
 * violations and exit 1, or a one-line ok summary on success.
 */
function main() {
  const violations = checkRules()
  const submitTaskResult = checkSubmitTaskRoutes()
  violations.push(...submitTaskResult.violations)
  if (violations.length > 0) {
    process.stderr.write('[check:log-semantic] semantic violations detected:\n')
    for (const violation of violations) {
      process.stderr.write(`- ${violation}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(
    `[check:log-semantic] ok rules=${RULES.length} submitTaskRoutes=${submitTaskResult.submitTaskFiles.length}\n`,
  )
}
main()

View File

@@ -0,0 +1,110 @@
import { execSync } from 'node:child_process'
// Source trees scanned by both rules.
const TARGETS = ['src/app/api', 'src/lib']
// Low-level storage modules allowed to call extractCOSKey() directly;
// everything else must go through resolveStorageKeyFromMediaValue.
const EXTRACT_ALLOWLIST = new Set<string>([
  'src/lib/media/service.ts',
  'src/lib/cos.ts',
])
// Files associated with the media-fetch rule. NOTE(review): main() currently
// checks ONLY files in this set for unwrapped fetches and skips everything
// else — inverted relative to how EXTRACT_ALLOWLIST exempts files above.
// Confirm the intended direction.
const FETCH_MEDIA_ALLOWLIST = new Set<string>([
  'src/lib/cos.ts',
  'src/lib/media-process.ts',
  'src/lib/image-cache.ts',
  'src/lib/image-label.ts',
  'src/lib/workers/utils.ts',
  'src/app/api/novel-promotion/[projectId]/download-images/route.ts',
  'src/app/api/novel-promotion/[projectId]/download-videos/route.ts',
  'src/app/api/novel-promotion/[projectId]/download-voices/route.ts',
  'src/app/api/novel-promotion/[projectId]/update-asset-label/route.ts',
  'src/app/api/novel-promotion/[projectId]/voice-generate/route.ts',
  'src/app/api/novel-promotion/[projectId]/video-proxy/route.ts',
])
/**
 * Run a shell command and return its stdout. A non-zero exit does not throw:
 * whatever stdout the failed command produced is returned instead (rg exits
 * 1 on "no matches", which is a normal outcome here).
 */
function run(cmd: string): string {
  try {
    return execSync(cmd, { encoding: 'utf8' })
  } catch (error: unknown) {
    if (error && typeof error === 'object' && 'stdout' in error) {
      const stdout = (error as { stdout?: unknown }).stdout
      if (typeof stdout === 'string') return stdout
    }
    return ''
  }
}

/** Split command output into trimmed, non-empty lines. */
function parseLines(output: string): string[] {
  const lines = output.split('\n')
  return lines.map((line) => line.trim()).filter((line) => line.length > 0)
}
/** File path portion of an `rg -n` output line ("file:line:code"). */
function getFile(line: string): string {
  const colon = line.indexOf(':')
  return colon === -1 ? line : line.slice(0, colon)
}

/** Code portion: everything after the second ':' (line number stripped), trimmed. */
function getCode(line: string): string {
  const firstColon = line.indexOf(':')
  const secondColon = line.indexOf(':', firstColon + 1)
  return secondColon === -1 ? '' : line.slice(secondColon + 1).trim()
}

/** Extract the first argument expression from a `fetch(...)` call fragment. */
function extractFetchArg(code: string): string {
  const matched = /fetch\(\s*([^)]+)\)/.exec(code)
  if (!matched) return ''
  return (matched[1] || '').trim()
}

/** Args considered already safe: toFetchableUrl(...), string literals, new URL(...). */
function isSafeFetchArg(arg: string): boolean {
  if (arg === '') return false
  return arg.startsWith('toFetchableUrl(') || /^['"`]/.test(arg) || arg.startsWith('new URL(')
}

/** Heuristic: does the argument expression look like a media/signed URL? */
function isMediaLikeFetchArg(arg: string): boolean {
  const mediaThenUrl = /(image|video|audio|signed).*url/i
  const urlThenMedia = /url.*(image|video|audio|signed)/i
  return mediaThenUrl.test(arg) || urlThenMedia.test(arg)
}
/**
 * Enforce media normalization conventions over src/app/api and src/lib:
 *  Rule 1: business code must not call extractCOSKey() directly — use
 *          resolveStorageKeyFromMediaValue (EXTRACT_ALLOWLIST holds the
 *          low-level modules that legitimately need it).
 *  Rule 2: media-looking fetch() arguments must be wrapped in toFetchableUrl().
 *
 * Bug fix: the fetch rule previously SKIPPED every file that was NOT in
 * FETCH_MEDIA_ALLOWLIST (`if (!FETCH_MEDIA_ALLOWLIST.has(file)) return false`),
 * so only allowlisted files could ever be flagged — the opposite of how
 * EXTRACT_ALLOWLIST is applied in rule 1. Allowlisted files are now exempt
 * and all other files are checked.
 */
function main() {
  const targetExpr = TARGETS.join(' ')
  // Rule 1: direct extractCOSKey callers outside the allowlist.
  const extractOutput = run(`rg -n "extractCOSKey\\\\(" ${targetExpr}`)
  const extractLines = parseLines(extractOutput)
  const extractViolations = extractLines.filter((line) => !EXTRACT_ALLOWLIST.has(getFile(line)))
  // Rule 2: media-like fetch() calls must go through toFetchableUrl.
  const fetchOutput = run(`rg -n "fetch\\\\(" ${targetExpr}`)
  const fetchLines = parseLines(fetchOutput)
  const fetchViolations = fetchLines.filter((line) => {
    const file = getFile(line)
    // Allowlisted files are exempt from the wrapping requirement.
    if (FETCH_MEDIA_ALLOWLIST.has(file)) return false
    const arg = extractFetchArg(getCode(line))
    if (!isMediaLikeFetchArg(arg)) return false
    return !isSafeFetchArg(arg)
  })
  const violations = [
    ...extractViolations.map((line) => `extractCOSKey forbidden: ${line}`),
    ...fetchViolations.map((line) => `fetch without toFetchableUrl: ${line}`),
  ]
  if (violations.length > 0) {
    process.stderr.write('[check:media-normalization] found violations:\n')
    for (const item of violations) {
      process.stderr.write(`- ${item}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(
    `[check:media-normalization] ok extract_scanned=${extractLines.length} fetch_scanned=${fetchLines.length} allow_extract=${EXTRACT_ALLOWLIST.size} allow_fetch=${FETCH_MEDIA_ALLOWLIST.size}\n`,
  )
}
main()

View File

@@ -0,0 +1,462 @@
// Prisma client handle. NOTE(review): declared but never assigned anywhere in
// this chunk — presumably initialized further down (e.g. via a lazy require);
// confirm against the rest of the file.
let prisma
// --strict: escalate findings to a non-zero exit.
const STRICT = process.argv.includes('--strict')
// Project columns that store a provider::modelId model key.
const MODEL_FIELDS = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
]
// Cap on collected example rows per summary.
const MAX_SAMPLES = 200
// Every namespace a `capabilities` object may legally contain.
const CAPABILITY_NAMESPACES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
// Valid modelType values.
const MODEL_TYPES = new Set(['llm', 'image', 'video', 'audio', 'lipsync'])
// Allowed fields per capabilities namespace. NOTE(review): the video set here
// omits generationModeOptions/generateAudioOptions, unlike the catalog
// checker's copy of this table earlier in the snapshot — confirm whether the
// two lists are intentionally different.
const CAPABILITY_NAMESPACE_ALLOWED_FIELDS = {
  llm: new Set(['reasoningEffortOptions', 'fieldI18n']),
  image: new Set(['resolutionOptions', 'fieldI18n']),
  video: new Set([
    'durationOptions',
    'fpsOptions',
    'resolutionOptions',
    'firstlastframe',
    'supportGenerateAudio',
    'fieldI18n',
  ]),
  audio: new Set(['voiceOptions', 'rateOptions', 'fieldI18n']),
  lipsync: new Set(['modeOptions', 'fieldI18n']),
}
// Per namespace: translatable field name -> the options field whose values
// that field's optionLabelKeys must cover.
const CAPABILITY_NAMESPACE_I18N_FIELDS = {
  llm: {
    reasoningEffort: 'reasoningEffortOptions',
  },
  image: {
    resolution: 'resolutionOptions',
  },
  video: {
    duration: 'durationOptions',
    fps: 'fpsOptions',
    resolution: 'resolutionOptions',
  },
  audio: {
    voice: 'voiceOptions',
    rate: 'rateOptions',
  },
  lipsync: {
    mode: 'modeOptions',
  },
}
// Plain object: non-null, typeof object, and not an array.
function isRecord(value) {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && Array.isArray(value) === false
}

// Non-blank string.
function isNonEmptyString(value) {
  return typeof value === 'string' && value.trim() !== ''
}

// Array whose entries are all non-blank strings.
function isStringArray(value) {
  if (!Array.isArray(value)) return false
  return value.every((item) => isNonEmptyString(item))
}

// Array whose entries are all finite numbers.
function isNumberArray(value) {
  if (!Array.isArray(value)) return false
  for (const item of value) {
    if (typeof item !== 'number' || !Number.isFinite(item)) return false
  }
  return true
}
// Parse a "provider::modelId" key strictly; returns null unless both halves
// are non-empty after trimming. (Non-empty-string guard inlined.)
function parseModelKeyStrict(value) {
  if (typeof value !== 'string' || value.trim().length === 0) return null
  const trimmed = value.trim()
  const sep = trimmed.indexOf('::')
  if (sep < 0) return null
  const provider = trimmed.slice(0, sep).trim()
  const modelId = trimmed.slice(sep + 2).trim()
  if (provider === '' || modelId === '') return null
  return {
    provider,
    modelId,
    modelKey: `${provider}::${modelId}`,
  }
}
// Append a violation sample to the summary unless the MAX_SAMPLES cap is reached.
function addSample(summary, sample) {
  const atCapacity = summary.samples.length >= MAX_SAMPLES
  if (atCapacity) return
  summary.samples.push(sample)
}
// Record a single { field, message } validation issue.
function pushIssue(issues, field, message) {
  const issue = { field, message }
  issues.push(issue)
}
// An i18n key is a non-empty string containing a dot (namespaced key shape).
function isI18nKey(value) {
  if (!isNonEmptyString(value)) return false
  return value.includes('.')
}
// Flag any field in a capabilities namespace object that is not in the
// namespace's allowlist. The legacy `i18n` field gets a dedicated message
// pointing callers to `fieldI18n`.
function validateAllowedFields(issues, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  const allowed = CAPABILITY_NAMESPACE_ALLOWED_FIELDS[namespace]
  for (const field of Object.keys(namespaceValue)) {
    if (allowed.has(field)) continue
    const message = field === 'i18n'
      ? 'use fieldI18n instead of i18n'
      : `unknown capability field: ${field}`
    pushIssue(issues, `capabilities.${namespace}.${field}`, message)
  }
}
// Validate the optional `fieldI18n` block of a capabilities namespace:
// each entry must name a known field for that namespace, its labelKey/unitKey
// must look like i18n keys, and optionLabelKeys must only reference option
// values actually declared in the matching *Options array.
function validateFieldI18nMap(issues, namespace, namespaceValue) {
  if (!isRecord(namespaceValue)) return
  // fieldI18n is optional; only validate when present.
  if (namespaceValue.fieldI18n === undefined) return
  if (!isRecord(namespaceValue.fieldI18n)) {
    pushIssue(issues, `capabilities.${namespace}.fieldI18n`, 'fieldI18n must be an object')
    return
  }
  const allowedI18nFields = CAPABILITY_NAMESPACE_I18N_FIELDS[namespace]
  for (const [fieldName, fieldConfig] of Object.entries(namespaceValue.fieldI18n)) {
    if (!(fieldName in allowedI18nFields)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}`,
        `unknown i18n field: ${fieldName}`,
      )
      continue
    }
    if (!isRecord(fieldConfig)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}`,
        'field i18n config must be an object',
      )
      continue
    }
    if (fieldConfig.labelKey !== undefined && !isI18nKey(fieldConfig.labelKey)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}.labelKey`,
        'labelKey must be an i18n key',
      )
    }
    if (fieldConfig.unitKey !== undefined && !isI18nKey(fieldConfig.unitKey)) {
      pushIssue(
        issues,
        `capabilities.${namespace}.fieldI18n.${fieldName}.unitKey`,
        'unitKey must be an i18n key',
      )
    }
    if (fieldConfig.optionLabelKeys !== undefined) {
      if (!isRecord(fieldConfig.optionLabelKeys)) {
        pushIssue(
          issues,
          `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys`,
          'optionLabelKeys must be an object',
        )
        continue
      }
      // Cross-check option keys against the declared options array; options are
      // stringified because video options are numeric while map keys are strings.
      const optionFieldName = allowedI18nFields[fieldName]
      const allowedOptionsRaw = namespaceValue[optionFieldName]
      const allowedOptions = Array.isArray(allowedOptionsRaw)
        ? new Set(allowedOptionsRaw.map((value) => String(value)))
        : null
      for (const [optionValue, optionLabelKey] of Object.entries(fieldConfig.optionLabelKeys)) {
        if (!isI18nKey(optionLabelKey)) {
          pushIssue(
            issues,
            `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys.${optionValue}`,
            'option label must be an i18n key',
          )
        }
        if (allowedOptions && !allowedOptions.has(optionValue)) {
          pushIssue(
            issues,
            `capabilities.${namespace}.fieldI18n.${fieldName}.optionLabelKeys.${optionValue}`,
            `option ${optionValue} is not defined in ${optionFieldName}`,
          )
        }
      }
    }
  }
}
/**
 * Validate a custom model's `capabilities` object against the per-namespace
 * contract and return a list of { field, message } issues (empty = valid).
 *
 * The original implementation repeated the same validate-namespace shape five
 * times (llm/image/video/audio/lipsync); this version drives the identical
 * checks from a table, preserving issue order and messages exactly.
 */
function validateCapabilities(modelType, capabilities) {
  const issues = []
  if (!MODEL_TYPES.has(modelType)) {
    pushIssue(issues, 'type', 'type must be llm/image/video/audio/lipsync')
    return issues
  }
  // Absent capabilities are valid; only validate when present.
  if (capabilities === undefined || capabilities === null) return issues
  if (!isRecord(capabilities)) {
    pushIssue(issues, 'capabilities', 'capabilities must be an object')
    return issues
  }
  // Namespace-level checks: unknown namespaces and namespaces that do not
  // match the model's declared type.
  for (const namespace of Object.keys(capabilities)) {
    if (!CAPABILITY_NAMESPACES.has(namespace)) {
      pushIssue(issues, `capabilities.${namespace}`, `unknown capabilities namespace: ${namespace}`)
      continue
    }
    if (namespace !== modelType) {
      pushIssue(issues, `capabilities.${namespace}`, `namespace ${namespace} is not allowed for model type ${modelType}`)
    }
  }
  const isBoolean = (value) => typeof value === 'boolean'
  // Per-namespace field validators: [fieldName, predicate, failure message].
  // Insertion order matters — it fixes both namespace and field issue ordering.
  const FIELD_CHECKS = {
    llm: [['reasoningEffortOptions', isStringArray, 'must be string array']],
    image: [['resolutionOptions', isStringArray, 'must be string array']],
    video: [
      ['durationOptions', isNumberArray, 'must be number array'],
      ['fpsOptions', isNumberArray, 'must be number array'],
      ['resolutionOptions', isStringArray, 'must be string array'],
      ['supportGenerateAudio', isBoolean, 'must be boolean'],
      ['firstlastframe', isBoolean, 'must be boolean'],
    ],
    audio: [
      ['voiceOptions', isStringArray, 'must be string array'],
      ['rateOptions', isStringArray, 'must be string array'],
    ],
    lipsync: [['modeOptions', isStringArray, 'must be string array']],
  }
  for (const [namespace, checks] of Object.entries(FIELD_CHECKS)) {
    const namespaceValue = capabilities[namespace]
    if (namespaceValue === undefined) continue
    if (!isRecord(namespaceValue)) {
      pushIssue(issues, `capabilities.${namespace}`, `${namespace} capabilities must be an object`)
      continue
    }
    validateAllowedFields(issues, namespace, namespaceValue)
    for (const [field, isValid, message] of checks) {
      if (namespaceValue[field] !== undefined && !isValid(namespaceValue[field])) {
        pushIssue(issues, `capabilities.${namespace}.${field}`, message)
      }
    }
    validateFieldI18nMap(issues, namespace, namespaceValue)
  }
  return issues
}
/**
 * Scan userPreference and novelPromotionProject rows for model-config contract
 * violations (non provider::modelId keys, malformed customModels JSON, bad
 * capability shapes), print a JSON summary, and set a non-zero exit code when
 * --strict is passed and any violation was found.
 */
async function main() {
  let PrismaClient
  try {
    // Dynamic import so a missing dependency yields a readable error message.
    ({ PrismaClient } = await import('@prisma/client'))
  } catch {
    throw new Error('MISSING_DEPENDENCY: @prisma/client is not installed, run npm install first')
  }
  prisma = new PrismaClient()
  // Aggregated counters plus up to MAX_SAMPLES example violations.
  const summary = {
    generatedAt: new Date().toISOString(),
    userPreference: {
      total: 0,
      invalidModelKeyFields: 0,
      invalidCustomModelsJson: 0,
      invalidCustomModelShape: 0,
      invalidCapabilities: 0,
    },
    novelPromotionProject: {
      total: 0,
      invalidModelKeyFields: 0,
    },
    samples: [],
  }
  const userPrefs = await prisma.userPreference.findMany({
    select: {
      id: true,
      customModels: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  })
  for (const pref of userPrefs) {
    summary.userPreference.total += 1
    // Each model reference column must be a strict provider::modelId key.
    for (const field of MODEL_FIELDS) {
      const rawValue = pref[field]
      if (!rawValue) continue
      if (!parseModelKeyStrict(rawValue)) {
        summary.userPreference.invalidModelKeyFields += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field,
          reason: 'model field is not provider::modelId',
        })
      }
    }
    if (!pref.customModels) continue
    // customModels is stored as a JSON string; it must parse to an array.
    let parsedCustomModels
    try {
      parsedCustomModels = JSON.parse(pref.customModels)
    } catch {
      summary.userPreference.invalidCustomModelsJson += 1
      addSample(summary, {
        table: 'userPreference',
        rowId: pref.id,
        field: 'customModels',
        reason: 'invalid JSON',
      })
      continue
    }
    if (!Array.isArray(parsedCustomModels)) {
      summary.userPreference.invalidCustomModelsJson += 1
      addSample(summary, {
        table: 'userPreference',
        rowId: pref.id,
        field: 'customModels',
        reason: 'customModels is not array',
      })
      continue
    }
    for (let index = 0; index < parsedCustomModels.length; index += 1) {
      const modelRaw = parsedCustomModels[index]
      if (!isRecord(modelRaw)) {
        summary.userPreference.invalidCustomModelShape += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field: `customModels[${index}]`,
          reason: 'model item is not object',
        })
        continue
      }
      // modelKey must be self-consistent with the provider/modelId fields.
      const modelKey = isNonEmptyString(modelRaw.modelKey) ? modelRaw.modelKey.trim() : ''
      const provider = isNonEmptyString(modelRaw.provider) ? modelRaw.provider.trim() : ''
      const modelId = isNonEmptyString(modelRaw.modelId) ? modelRaw.modelId.trim() : ''
      const parsed = parseModelKeyStrict(modelKey)
      if (!parsed || parsed.provider !== provider || parsed.modelId !== modelId) {
        summary.userPreference.invalidCustomModelShape += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field: `customModels[${index}].modelKey`,
          reason: 'modelKey/provider/modelId mismatch',
        })
      }
      const modelType = isNonEmptyString(modelRaw.type) ? modelRaw.type.trim() : ''
      const capabilityIssues = validateCapabilities(modelType, modelRaw.capabilities)
      if (capabilityIssues.length > 0) {
        // Only the first capability issue is sampled; the counter is per model item.
        summary.userPreference.invalidCapabilities += 1
        addSample(summary, {
          table: 'userPreference',
          rowId: pref.id,
          field: capabilityIssues[0].field,
          reason: capabilityIssues[0].message,
        })
      }
    }
  }
  const projects = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  })
  for (const project of projects) {
    summary.novelPromotionProject.total += 1
    for (const field of MODEL_FIELDS) {
      const rawValue = project[field]
      if (!rawValue) continue
      if (!parseModelKeyStrict(rawValue)) {
        summary.novelPromotionProject.invalidModelKeyFields += 1
        addSample(summary, {
          table: 'novelPromotionProject',
          rowId: project.id,
          field,
          reason: 'model field is not provider::modelId',
        })
      }
    }
  }
  process.stdout.write(`${JSON.stringify(summary, null, 2)}\n`)
  if (!STRICT) return
  const hasViolations = summary.userPreference.invalidModelKeyFields > 0
    || summary.userPreference.invalidCustomModelsJson > 0
    || summary.userPreference.invalidCustomModelShape > 0
    || summary.userPreference.invalidCapabilities > 0
    || summary.novelPromotionProject.invalidModelKeyFields > 0
  if (hasViolations) {
    process.exitCode = 1
  }
}
main()
  .catch((error) => {
    process.stderr.write(`[check-model-config-contract] failed: ${String(error)}\n`)
    process.exitCode = 1
  })
  .finally(async () => {
    // prisma may be undefined when the dynamic import failed before assignment.
    if (prisma) {
      await prisma.$disconnect()
    }
  })

View File

@@ -0,0 +1,52 @@
import { execSync } from 'node:child_process'
// Files permitted to call console.* directly: the logging core itself plus
// guard/check scripts whose job is to write to the terminal.
const ALLOWLIST = new Set<string>([
  'src/lib/logging/core.ts',
  'src/lib/logging/config.ts',
  'src/lib/logging/context.ts',
  'src/lib/logging/redact.ts',
  'scripts/check-no-console.ts',
  'scripts/guards/no-api-direct-llm-call.mjs',
  'scripts/guards/no-internal-task-sync-fallback.mjs',
  'scripts/guards/no-media-provider-bypass.mjs',
  'scripts/guards/no-server-mirror-state.mjs',
  'scripts/guards/task-loading-guard.mjs',
  'scripts/guards/task-target-states-no-polling-guard.mjs',
])
// Run a shell command and return its stdout. A non-zero exit (e.g. rg finding
// no matches) is not fatal: whatever stdout the failed process produced is
// returned, and '' when none is available.
function run(cmd: string): string {
  try {
    return execSync(cmd, { encoding: 'utf8' })
  } catch (error: unknown) {
    const maybeStdout =
      error && typeof error === 'object' && 'stdout' in error
        ? (error as { stdout?: unknown }).stdout
        : undefined
    return typeof maybeStdout === 'string' ? maybeStdout : ''
  }
}
// Scan src/ and scripts/ for direct console.* calls with ripgrep and fail the
// build when any call sits outside the ALLOWLIST.
function main() {
  // Double-escaped so the shell delivers the regex console\.(log|...)\( to rg.
  const output = run(`rg -n "console\\\\.(log|info|warn|error|debug)\\\\(" src scripts`)
  const lines = output
    .split('\n')
    .map((line) => line.trim())
    .filter(Boolean)
  const violations = lines.filter((line) => {
    // rg -n output is "file:line:match"; the first segment is the file path.
    const file = line.split(':', 1)[0]
    return !ALLOWLIST.has(file)
  })
  if (violations.length > 0) {
    process.stderr.write('[check:logs] found forbidden console usage:\n')
    for (const line of violations) {
      process.stderr.write(`- ${line}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(`[check:logs] ok scanned=${lines.length} allowlist=${ALLOWLIST.size}\n`)
}
main()

View File

@@ -0,0 +1,323 @@
import { prisma } from '@/lib/prisma'
import { TASK_TYPE } from '@/lib/task/types'
// Arbitrary JSON loaded from task payload/result/event columns.
type AnyJson = unknown
// A string hit found while walking a JSON tree: JSONPath-like location + value.
type Match = {
  path: string
  value: string
}
// CLI options, all supplied as --name=value flags.
type Options = {
  minutes: number
  limit: number
  projectId: string | null
  strictNoData: boolean
  includeEvents: boolean
  maxEventsPerTask: number
  json: boolean
}
// Coarse buckets used when attributing failed tasks in the report.
type FailureType = 'normalize' | 'model' | 'cancelled' | 'other'
// Structured error codes attributed to the model/provider side.
const MODEL_ERROR_CODES = new Set([
  'GENERATION_FAILED',
  'GENERATION_TIMEOUT',
  'RATE_LIMIT',
  'EXTERNAL_ERROR',
  'SENSITIVE_CONTENT',
])
// Read a positive integer --name=value flag; fall back for missing,
// non-numeric, or non-positive values.
function parseNumberArg(name: string, fallback: number): number {
  const match = process.argv.find((arg) => arg.startsWith(`--${name}=`))
  if (!match) return fallback
  const parsed = Number.parseInt(match.split('=')[1] || '', 10)
  if (!Number.isFinite(parsed) || parsed <= 0) return fallback
  return parsed
}
// Read a --name=value flag as a trimmed string; null when absent or blank.
function parseStringArg(name: string): string | null {
  const match = process.argv.find((arg) => arg.startsWith(`--${name}=`))
  if (!match) return null
  const trimmed = (match.split('=')[1] || '').trim()
  return trimmed.length > 0 ? trimmed : null
}
// Read a --name=value boolean flag; "1"/"true"/"yes"/"on" (case-insensitive)
// mean true, anything else false, and a missing flag yields the fallback.
function parseBooleanArg(name: string, fallback = false): boolean {
  const match = process.argv.find((arg) => arg.startsWith(`--${name}=`))
  if (!match) return fallback
  const normalized = (match.split('=')[1] || '').trim().toLowerCase()
  return ['1', 'true', 'yes', 'on'].includes(normalized)
}
// Assemble all CLI options from process.argv, applying defaults
// (24h window, 200 tasks, 40 events per task).
function parseOptions(): Options {
  const minutes = parseNumberArg('minutes', 60 * 24)
  const limit = parseNumberArg('limit', 200)
  const projectId = parseStringArg('projectId')
  const strictNoData = parseBooleanArg('strictNoData', false)
  const includeEvents = parseBooleanArg('includeEvents', false)
  const maxEventsPerTask = parseNumberArg('maxEventsPerTask', 40)
  const json = parseBooleanArg('json', false)
  return { minutes, limit, projectId, strictNoData, includeEvents, maxEventsPerTask, json }
}
// Truncate a string to `max` characters, appending '...' when shortened.
function toExcerpt(value: string, max = 180): string {
  return value.length > max ? `${value.slice(0, max)}...` : value
}
// Depth-first walk of an arbitrary JSON value, collecting every string leaf
// for which `predicate` holds, tagged with a JSONPath-like location ('$' root,
// '.key' for object members, '[i]' for array elements). The shared `matches`
// accumulator is both mutated and returned.
function findStringMatches(
  value: AnyJson,
  predicate: (input: string) => boolean,
  path = '$',
  matches: Match[] = [],
): Match[] {
  if (typeof value === 'string') {
    if (predicate(value)) matches.push({ path, value })
  } else if (Array.isArray(value)) {
    for (let index = 0; index < value.length; index += 1) {
      findStringMatches(value[index], predicate, `${path}[${index}]`, matches)
    }
  } else if (value && typeof value === 'object') {
    for (const [key, child] of Object.entries(value as Record<string, unknown>)) {
      findStringMatches(child, predicate, `${path}.${key}`, matches)
    }
  }
  return matches
}
/**
 * Classify a failed task into a coarse bucket:
 * - 'cancelled'  — explicit TASK_CANCELLED code
 * - 'model'      — provider/model-side structured codes (MODEL_ERROR_CODES)
 * - 'normalize'  — outbound-image normalization codes, or heuristic text match
 * - 'other'      — everything else
 * When no structured code is present, all free-text fields (errorMessage,
 * result strings, event payload strings) are scanned against the heuristics.
 */
function classifyFailure(task: {
  errorCode: string | null
  errorMessage: string | null
  result: AnyJson | null
  events: Array<{ payload: AnyJson | null }>
}): FailureType {
  const code = (task.errorCode || '').trim().toUpperCase()
  // Heuristic patterns used only when no structured code is present.
  const normalizeRe = /normalize|video_frame_normalize|normalizeReferenceImagesForGeneration|reference image normalize failed|outbound image input is empty|relative_path_rejected/i
  const modelRe = /generation failed|provider|upstream|rate limit|timed out|timeout|sensitive/i
  if (code === 'TASK_CANCELLED') return 'cancelled'
  if (MODEL_ERROR_CODES.has(code)) return 'model'
  if (code) {
    // Any other structured code: only the two normalize-specific codes map to
    // 'normalize'; everything else is 'other'.
    const explicitNormalizeCode = code === 'INVALID_PARAMS' || code === 'OUTBOUND_IMAGE_FETCH_FAILED'
    return explicitNormalizeCode ? 'normalize' : 'other'
  }
  // Code is '' past this point, so only free-text fields remain.
  // (The previous `if (code) values.push(code)` here was unreachable dead code
  // — every truthy code has already returned above — and has been removed.)
  const values: string[] = []
  if (task.errorMessage) values.push(task.errorMessage)
  if (task.result) {
    for (const hit of findStringMatches(task.result, () => true)) {
      values.push(hit.value)
    }
  }
  for (const event of task.events) {
    if (!event.payload) continue
    for (const hit of findStringMatches(event.payload, () => true)) {
      values.push(hit.value)
    }
  }
  if (values.some((item) => normalizeRe.test(item))) return 'normalize'
  if (values.some((item) => modelRe.test(item))) return 'model'
  return 'other'
}
/**
 * Sample recent tasks of the monitored image/video types, detect any
 * '/_next/image' URL contamination in payload/result/event JSON, and report
 * failure counts broken down by class and error code.
 * Exit codes: 1 on contamination or crash, 2 when --strictNoData=1 and the
 * sample window is empty.
 */
async function main() {
  const options = parseOptions()
  const since = new Date(Date.now() - options.minutes * 60_000)
  const monitoredTypes = [
    TASK_TYPE.MODIFY_ASSET_IMAGE,
    TASK_TYPE.ASSET_HUB_MODIFY,
    TASK_TYPE.VIDEO_PANEL,
  ]
  const tasks = await prisma.task.findMany({
    where: {
      type: { in: monitoredTypes },
      createdAt: { gte: since },
      ...(options.projectId ? { projectId: options.projectId } : {}),
    },
    select: {
      id: true,
      type: true,
      status: true,
      projectId: true,
      targetType: true,
      targetId: true,
      createdAt: true,
      errorCode: true,
      errorMessage: true,
      payload: true,
      result: true,
    },
    orderBy: { createdAt: 'desc' },
    take: options.limit,
  })
  if (tasks.length === 0) {
    process.stdout.write(
      `[check:outbound-image-runtime-sample] no data window=${options.minutes}m limit=${options.limit} strictNoData=${options.strictNoData}\n`,
    )
    if (options.strictNoData) process.exit(2)
    return
  }
  // Optionally load the most recent events per task (fetched newest-first,
  // then reversed back into chronological order).
  const eventsByTaskId = new Map<string, Array<{ eventType: string; payload: AnyJson | null; createdAt: Date }>>()
  let eventCount = 0
  if (options.includeEvents) {
    for (const task of tasks) {
      const rows = await prisma.taskEvent.findMany({
        where: { taskId: task.id },
        select: {
          taskId: true,
          eventType: true,
          payload: true,
          createdAt: true,
        },
        orderBy: { id: 'desc' },
        take: options.maxEventsPerTask,
      })
      const ordered = [...rows].reverse()
      eventCount += ordered.length
      if (ordered.length > 0) {
        eventsByTaskId.set(
          task.id,
          ordered.map((event) => ({
            eventType: event.eventType,
            payload: event.payload,
            createdAt: event.createdAt,
          })),
        )
      }
    }
  }
  // A hit means an internal Next.js optimizer URL leaked into outbound data.
  const nextImagePredicate = (input: string) => input.includes('/_next/image')
  const hits: Array<{
    taskId: string
    taskType: string
    source: 'task.payload' | 'task.result' | 'task.event'
    path: string
    value: string
  }> = []
  let failedCount = 0
  const failedByClass: Record<FailureType, number> = {
    normalize: 0,
    model: 0,
    cancelled: 0,
    other: 0,
  }
  const failedByCode: Record<string, number> = {}
  for (const task of tasks) {
    const taskEventsForTask = eventsByTaskId.get(task.id) || []
    if (task.payload) {
      for (const match of findStringMatches(task.payload, nextImagePredicate)) {
        hits.push({
          taskId: task.id,
          taskType: task.type,
          source: 'task.payload',
          path: match.path,
          value: match.value,
        })
      }
    }
    if (task.result) {
      for (const match of findStringMatches(task.result, nextImagePredicate)) {
        hits.push({
          taskId: task.id,
          taskType: task.type,
          source: 'task.result',
          path: match.path,
          value: match.value,
        })
      }
    }
    for (const event of taskEventsForTask) {
      if (!event.payload) continue
      for (const match of findStringMatches(event.payload, nextImagePredicate)) {
        hits.push({
          taskId: task.id,
          taskType: task.type,
          source: 'task.event',
          path: match.path,
          value: match.value,
        })
      }
    }
    if (task.status === 'failed') {
      failedCount += 1
      const code = (task.errorCode || 'UNKNOWN').trim() || 'UNKNOWN'
      failedByCode[code] = (failedByCode[code] || 0) + 1
      const failureType = classifyFailure({
        errorCode: task.errorCode,
        errorMessage: task.errorMessage,
        result: task.result,
        events: taskEventsForTask,
      })
      failedByClass[failureType] += 1
    }
  }
  const typeCount = tasks.reduce<Record<string, number>>((acc, item) => {
    acc[item.type] = (acc[item.type] || 0) + 1
    return acc
  }, {})
  process.stdout.write(
    `[check:outbound-image-runtime-sample] window=${options.minutes}m sampled=${tasks.length} events=${eventCount} includeEvents=${options.includeEvents} next_image_hits=${hits.length}\n`,
  )
  process.stdout.write(`[check:outbound-image-runtime-sample] task_types=${JSON.stringify(typeCount)}\n`)
  process.stdout.write(
    `[check:outbound-image-runtime-sample] failures total=${failedCount} normalize=${failedByClass.normalize} model=${failedByClass.model} cancelled=${failedByClass.cancelled} other=${failedByClass.other} by_code=${JSON.stringify(failedByCode)}\n`,
  )
  if (options.json) {
    process.stdout.write(
      `${JSON.stringify({
        windowMinutes: options.minutes,
        sampled: tasks.length,
        events: eventCount,
        includeEvents: options.includeEvents,
        nextImageHits: hits.length,
        taskTypes: typeCount,
        failures: {
          total: failedCount,
          byClass: failedByClass,
          byCode: failedByCode,
        },
      })}\n`,
    )
  }
  if (hits.length > 0) {
    // Only the first 20 hits are printed to keep the log readable.
    process.stderr.write('[check:outbound-image-runtime-sample] found /_next/image contamination:\n')
    for (const hit of hits.slice(0, 20)) {
      process.stderr.write(
        `- task=${hit.taskId} type=${hit.taskType} source=${hit.source} path=${hit.path} value=${toExcerpt(hit.value)}\n`,
      )
    }
    process.exit(1)
  }
}
main()
  .catch((error) => {
    const message = error instanceof Error ? error.message : String(error)
    process.stderr.write(`[check:outbound-image-runtime-sample] failed: ${message}\n`)
    process.exit(1)
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,224 @@
import { prisma } from '@/lib/prisma'
import { TASK_STATUS, TASK_TYPE } from '@/lib/task/types'
// Count of tasks keyed by status string.
type StatusCount = Record<string, number>
// Aggregated stats for one time window of monitored tasks.
type WindowSummary = {
  total: number
  finishedTotal: number
  completed: number
  failed: number
  successRate: number | null
  byStatus: StatusCount
  byType: Record<string, number>
}
// CLI options, all supplied as --name=value flags.
type Options = {
  minutes: number
  baselineMinutes: number
  baselineOffsetMinutes: number
  projectId: string | null
  tolerancePct: number
  minFinishedSamples: number
  strict: boolean
  json: boolean
}
// Defaults: 7-day window, 2% allowed success-rate regression, and at least
// 20 finished tasks per window before the comparison is considered valid.
const DEFAULT_MINUTES = 60 * 24 * 7
const DEFAULT_TOLERANCE_PCT = 2
const DEFAULT_MIN_FINISHED_SAMPLES = 20
// Read a positive float --name=value flag; fall back for missing, non-numeric,
// or non-positive values.
// NOTE(review): values <= 0 fall back, so a zero tolerance cannot be requested.
function parseNumberArg(name: string, fallback: number): number {
  const match = process.argv.find((arg) => arg.startsWith(`--${name}=`))
  if (!match) return fallback
  const parsed = Number.parseFloat(match.split('=')[1] || '')
  if (!Number.isFinite(parsed) || parsed <= 0) return fallback
  return parsed
}
// Read a --name=value boolean flag; "1"/"true"/"yes"/"on" (case-insensitive)
// mean true, anything else false, and a missing flag yields the fallback.
function parseBooleanArg(name: string, fallback = false): boolean {
  const match = process.argv.find((arg) => arg.startsWith(`--${name}=`))
  if (!match) return fallback
  const normalized = (match.split('=')[1] || '').trim().toLowerCase()
  return ['1', 'true', 'yes', 'on'].includes(normalized)
}
// Read a --name=value flag as a trimmed string; null when absent or blank.
function parseStringArg(name: string): string | null {
  const match = process.argv.find((arg) => arg.startsWith(`--${name}=`))
  if (!match) return null
  const trimmed = (match.split('=')[1] || '').trim()
  return trimmed.length > 0 ? trimmed : null
}
// Assemble all CLI options. The baseline window defaults to the same length as
// the current window and, by default, sits immediately before it
// (offset = current window length).
function parseOptions(): Options {
  const minutes = parseNumberArg('minutes', DEFAULT_MINUTES)
  const baselineMinutes = parseNumberArg('baselineMinutes', minutes)
  const baselineOffsetMinutes = parseNumberArg('baselineOffsetMinutes', minutes)
  const projectId = parseStringArg('projectId')
  const tolerancePct = parseNumberArg('tolerancePct', DEFAULT_TOLERANCE_PCT)
  const minFinishedSamples = parseNumberArg('minFinishedSamples', DEFAULT_MIN_FINISHED_SAMPLES)
  const strict = parseBooleanArg('strict', false)
  const json = parseBooleanArg('json', false)
  return {
    minutes,
    baselineMinutes,
    baselineOffsetMinutes,
    projectId,
    tolerancePct,
    minFinishedSamples,
    strict,
    json,
  }
}
// Format a percentage with two decimals; null renders as 'N/A'.
function asPct(value: number | null): string {
  if (value === null) return 'N/A'
  return `${value.toFixed(2)}%`
}
// Success rate in percent over finished tasks; null when nothing finished.
function getSuccessRate(completed: number, failed: number): number | null {
  const finished = completed + failed
  return finished > 0 ? (completed / finished) * 100 : null
}
// Aggregate task rows into per-status / per-type counts plus the completed vs.
// failed success rate for one window.
function summarizeRows(
  rows: Array<{ status: string; type: string }>,
): WindowSummary {
  const byStatus: StatusCount = {}
  const byType: Record<string, number> = {}
  for (const { status, type } of rows) {
    byStatus[status] = (byStatus[status] ?? 0) + 1
    byType[type] = (byType[type] ?? 0) + 1
  }
  const completed = byStatus[TASK_STATUS.COMPLETED] ?? 0
  const failed = byStatus[TASK_STATUS.FAILED] ?? 0
  const finishedTotal = completed + failed
  return {
    total: rows.length,
    finishedTotal,
    completed,
    failed,
    successRate: getSuccessRate(completed, failed),
    byStatus,
    byType,
  }
}
// Fetch monitored tasks created within [from, to) — optionally scoped to one
// project — and summarize their statuses/types for comparison.
async function fetchWindowSummary(params: {
  from: Date
  to: Date
  projectId: string | null
}) {
  const monitoredTypes = [
    TASK_TYPE.MODIFY_ASSET_IMAGE,
    TASK_TYPE.ASSET_HUB_MODIFY,
    TASK_TYPE.VIDEO_PANEL,
  ]
  const rows = await prisma.task.findMany({
    where: {
      type: { in: monitoredTypes },
      createdAt: {
        gte: params.from,
        lt: params.to,
      },
      ...(params.projectId ? { projectId: params.projectId } : {}),
    },
    select: {
      status: true,
      type: true,
    },
  })
  return summarizeRows(rows)
}
/**
 * Compare the task success rate of a current window against a baseline window
 * and report pass/fail/blocked:
 * - pass    — delta within tolerance
 * - fail    — success rate regressed beyond tolerance (exit 1 under --strict)
 * - blocked — too few finished samples to judge (exit 2 under --strict)
 */
async function main() {
  const options = parseOptions()
  const now = Date.now()
  const currentEnd = new Date(now)
  const currentStart = new Date(now - options.minutes * 60_000)
  // Baseline window ends baselineOffsetMinutes ago (by default adjacent to the
  // current window) and spans baselineMinutes.
  const baselineEnd = new Date(now - options.baselineOffsetMinutes * 60_000)
  const baselineStart = new Date(baselineEnd.getTime() - options.baselineMinutes * 60_000)
  const [current, baseline] = await Promise.all([
    fetchWindowSummary({
      from: currentStart,
      to: currentEnd,
      projectId: options.projectId,
    }),
    fetchWindowSummary({
      from: baselineStart,
      to: baselineEnd,
      projectId: options.projectId,
    }),
  ])
  const hasEnoughCurrent = current.finishedTotal >= options.minFinishedSamples
  const hasEnoughBaseline = baseline.finishedTotal >= options.minFinishedSamples
  const hasEnoughSamples = hasEnoughCurrent && hasEnoughBaseline
  const rateDeltaPct =
    current.successRate !== null && baseline.successRate !== null
      ? current.successRate - baseline.successRate
      : null
  // Only a regression beyond the tolerance fails; improvements always pass.
  const meetsTolerance =
    rateDeltaPct !== null
      ? rateDeltaPct >= -Math.abs(options.tolerancePct)
      : false
  const status = hasEnoughSamples
    ? meetsTolerance
      ? 'pass'
      : 'fail'
    : 'blocked'
  process.stdout.write(
    `[check:outbound-image-success-rate] current=${asPct(current.successRate)} baseline=${asPct(baseline.successRate)} delta=${asPct(rateDeltaPct)} tolerance=-${Math.abs(options.tolerancePct).toFixed(2)}% status=${status}\n`,
  )
  process.stdout.write(
    `[check:outbound-image-success-rate] current_finished=${current.finishedTotal} baseline_finished=${baseline.finishedTotal} min_required=${options.minFinishedSamples}\n`,
  )
  process.stdout.write(
    `[check:outbound-image-success-rate] current_by_type=${JSON.stringify(current.byType)} baseline_by_type=${JSON.stringify(baseline.byType)}\n`,
  )
  if (options.json) {
    process.stdout.write(
      `${JSON.stringify({
        status,
        tolerancePct: options.tolerancePct,
        minFinishedSamples: options.minFinishedSamples,
        windows: {
          current: {
            from: currentStart.toISOString(),
            to: currentEnd.toISOString(),
            ...current,
          },
          baseline: {
            from: baselineStart.toISOString(),
            to: baselineEnd.toISOString(),
            ...baseline,
          },
        },
        rateDeltaPct,
        hasEnoughSamples,
      })}\n`,
    )
  }
  if (!options.strict) return
  if (status === 'pass') return
  if (status === 'blocked') process.exit(2)
  process.exit(1)
}
main()
  .catch((error) => {
    const message = error instanceof Error ? error.message : String(error)
    process.stderr.write(`[check:outbound-image-success-rate] failed: ${message}\n`)
    process.exit(1)
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,172 @@
import fs from 'node:fs'
import path from 'node:path'
// A static source-contract rule: `pattern` must (or, for the exclusion list,
// must not) match the content of `file`.
type Rule = {
  file: string
  pattern: RegExp
  message: string
}
// Read a repo-relative file (resolved against the current working directory)
// as UTF-8 text.
function readFile(relativePath: string): string {
  const absolutePath = path.resolve(process.cwd(), relativePath)
  return fs.readFileSync(absolutePath, 'utf8')
}
// Patterns that MUST appear in the named files. Together they pin the
// outbound-image unification contract: all outbound image inputs flow through
// the normalize helpers, routes sanitize payload image inputs, and preview UI
// uses the shared image-url helpers.
const mustIncludeRules: Rule[] = [
  // Core normalize API surface of outbound-image.ts.
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+normalizeToOriginalMediaUrl\s*\(/,
    message: 'missing normalizeToOriginalMediaUrl export',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+normalizeToBase64ForGeneration\s*\(/,
    message: 'missing normalizeToBase64ForGeneration export',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /export\s+async\s+function\s+normalizeReferenceImagesForGeneration\s*\(/,
    message: 'missing normalizeReferenceImagesForGeneration export',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /class\s+OutboundImageNormalizeError\s+extends\s+Error/,
    message: 'outbound-image.ts must expose structured normalize error type',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /OUTBOUND_IMAGE_FETCH_FAILED/,
    message: 'outbound-image.ts must classify fetch failures with structured error codes',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /OUTBOUND_IMAGE_REFERENCE_ALL_FAILED/,
    message: 'outbound-image.ts must fail explicitly when all references fail to normalize',
  },
  // cos.ts must delegate base64 conversion to the shared normalizer.
  {
    file: 'src/lib/cos.ts',
    pattern: /import\s+\{\s*normalizeToBase64ForGeneration\s*\}\s+from\s+'@\/lib\/media\/outbound-image'/,
    message: 'cos.ts must import normalizeToBase64ForGeneration',
  },
  {
    file: 'src/lib/cos.ts',
    pattern: /return\s+await\s+normalizeToBase64ForGeneration\(keyOrUrl\)/,
    message: 'imageUrlToBase64 must delegate to normalizeToBase64ForGeneration',
  },
  // Workers must normalize every image before sending it outbound.
  {
    file: 'src/lib/workers/handlers/image-task-handlers-core.ts',
    pattern: /normalizeToBase64ForGeneration\(currentUrl\)/,
    message: 'image-task-handlers-core.ts must convert currentUrl to base64 before outbound',
  },
  {
    file: 'src/lib/workers/handlers/image-task-handlers-core.ts',
    pattern: /normalizeReferenceImagesForGeneration\(extraReferenceInputs\)/,
    message: 'image-task-handlers-core.ts must normalize extra references before outbound',
  },
  {
    file: 'src/lib/workers/video.worker.ts',
    pattern: /const\s+sourceImageBase64\s*=\s*await\s+normalizeToBase64ForGeneration\(sourceImageUrl\)/,
    message: 'video.worker.ts must normalize source frame to base64',
  },
  {
    file: 'src/lib/workers/video.worker.ts',
    pattern: /lastFrameImageBase64\s*=\s*await\s+normalizeToBase64ForGeneration\(lastFrameUrl\)/,
    message: 'video.worker.ts must normalize last frame to base64',
  },
  // API routes must sanitize incoming image inputs before queuing tasks.
  {
    file: 'src/app/api/novel-promotion/[projectId]/modify-asset-image/route.ts',
    pattern: /sanitizeImageInputsForTaskPayload/,
    message: 'modify-asset-image route must sanitize image inputs',
  },
  {
    file: 'src/app/api/novel-promotion/[projectId]/modify-storyboard-image/route.ts',
    pattern: /sanitizeImageInputsForTaskPayload/,
    message: 'modify-storyboard-image route must sanitize image inputs',
  },
  {
    file: 'src/app/api/asset-hub/modify-image/route.ts',
    pattern: /sanitizeImageInputsForTaskPayload/,
    message: 'asset-hub modify-image route must sanitize image inputs',
  },
  // UI contract: shared image-url helpers and preview/play wiring.
  {
    file: 'src/components/ui/ImagePreviewModal.tsx',
    pattern: /import\s+\{\s*resolveOriginalImageUrl,\s*toDisplayImageUrl\s*\}\s+from\s+'@\/lib\/media\/image-url'/,
    message: 'ImagePreviewModal must use shared image-url helpers',
  },
  {
    file: 'src/lib/novel-promotion/stages/video-stage-runtime-core.tsx',
    pattern: /onPreviewImage=\{setPreviewImage\}/,
    message: 'Video stage runtime must wire preview callback to VideoPanelCard',
  },
  {
    file: 'src/app/[locale]/workspace/[projectId]/modes/novel-promotion/components/video/panel-card/types.ts',
    pattern: /onPreviewImage\?:\s*\(imageUrl:\s*string\)\s*=>\s*void/,
    message: 'VideoPanelCard runtime props must expose onPreviewImage',
  },
  {
    file: 'src/app/[locale]/workspace/[projectId]/modes/novel-promotion/components/video/panel-card/VideoPanelCardHeader.tsx',
    pattern: /className="absolute left-1\/2 top-1\/2 z-10 h-16 w-16 -translate-x-1\/2 -translate-y-1\/2 rounded-full"/,
    message: 'VideoPanelCard play trigger must be centered small button (preview/play separation)',
  },
]
// Patterns that must NOT appear: raw URLs handed to generators and silent
// fallback paths inside the normalizer.
const mustNotIncludeRules: Rule[] = [
  {
    file: 'src/lib/workers/handlers/image-task-handlers-core.ts',
    pattern: /referenceImages:\s*\[currentUrl\]/,
    message: 'image-task-handlers-core.ts must not pass raw currentUrl directly as outbound reference',
  },
  {
    file: 'src/lib/workers/video.worker.ts',
    pattern: /imageUrl:\s*sourceImageUrl/,
    message: 'video.worker.ts must not pass raw sourceImageUrl to generator',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /return\s+await\s+toFetchableAbsoluteUrl\(mediaPath\)/,
    message: 'outbound-image.ts must not silently fallback when /m route cannot resolve storage key',
  },
  {
    file: 'src/lib/media/outbound-image.ts',
    pattern: /return\s+await\s+toFetchableAbsoluteUrl\(unwrappedInput\)/,
    message: 'outbound-image.ts must not silently fallback unknown inputs to fetchable url',
  },
]
// Evaluate all include/exclude rules against the repo and exit 1 with a list
// of violations when any rule is broken.
function main() {
  const errors: string[] = []
  // File contents are cached so each file is read at most once.
  const cache = new Map<string, string>()
  const getContent = (file: string) => {
    if (!cache.has(file)) cache.set(file, readFile(file))
    return cache.get(file) as string
  }
  for (const rule of mustIncludeRules) {
    const content = getContent(rule.file)
    if (!rule.pattern.test(content)) {
      errors.push(`${rule.file}: ${rule.message}`)
    }
  }
  for (const rule of mustNotIncludeRules) {
    const content = getContent(rule.file)
    if (rule.pattern.test(content)) {
      errors.push(`${rule.file}: ${rule.message}`)
    }
  }
  if (errors.length > 0) {
    process.stderr.write('[check:outbound-image-unification] found violations:\n')
    for (const error of errors) {
      process.stderr.write(`- ${error}\n`)
    }
    process.exit(1)
  }
  process.stdout.write(
    `[check:outbound-image-unification] ok include_checks=${mustIncludeRules.length} exclude_checks=${mustNotIncludeRules.length}\n`,
  )
}
main()

View File

@@ -0,0 +1,293 @@
import { promises as fs } from 'node:fs'
import path from 'node:path'
const CATALOG_DIR = path.resolve(process.cwd(), 'standards/pricing')
const CAPABILITY_CATALOG_FILE = path.resolve(process.cwd(), 'standards/capabilities/image-video.catalog.json')
const API_TYPES = new Set(['text', 'image', 'video', 'voice', 'voice-design', 'lip-sync'])
const PRICING_MODES = new Set(['flat', 'capability'])
const TEXT_TOKEN_TYPES = new Set(['input', 'output'])
// ---- Type guards shared by the catalog validators below. ----

// Plain-object check: rejects null/undefined, primitives, and arrays.
function isRecord(value) {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && Array.isArray(value) === false
}

// Non-blank string.
function isNonEmptyString(value) {
  return typeof value === 'string' && value.trim() !== ''
}

// Scalar values permitted inside a capability tier's `when` conditions.
function isCapabilityValue(value) {
  return ['string', 'number', 'boolean'].includes(typeof value)
}

// Finite number: rejects NaN, +/-Infinity, and all non-number values.
function isFiniteNumber(value) {
  return Number.isFinite(value)
}
// Appends one structured validation issue for entry #index of `file`.
function pushIssue(issues, file, index, field, message) {
  issues.push({ file, index, field, message })
}

// Provider ids may be namespaced as "<key>:<rest>"; returns the key part
// (or the whole id when there is no ":" marker).
function getProviderKey(providerId) {
  const [key] = providerId.split(':', 1)
  return key
}

// Composite lookup key for capability entries, one per (type, provider, model).
function buildModelKey(modelType, provider, modelId) {
  return [modelType, provider, modelId].join('::')
}
// Lists absolute paths of every *.json pricing catalog in CATALOG_DIR.
async function listCatalogFiles() {
  const entries = await fs.readdir(CATALOG_DIR, { withFileTypes: true })
  return entries
    .filter((entry) => entry.isFile() && entry.name.endsWith('.json'))
    .map((entry) => path.join(CATALOG_DIR, entry.name))
}

// Reads a JSON file and asserts its top-level value is an array.
// Shared by both catalog readers (they previously duplicated this logic)
// so the parse/shape error contract stays consistent.
async function readJsonArray(filePath, label) {
  const raw = await fs.readFile(filePath, 'utf8')
  const parsed = JSON.parse(raw)
  if (!Array.isArray(parsed)) {
    throw new Error(`${label} must be an array: ${filePath}`)
  }
  return parsed
}

// Loads one pricing catalog file (array of pricing entries).
async function readCatalog(filePath) {
  return readJsonArray(filePath, 'catalog')
}

// Loads the image/video capability catalog (array of capability entries).
async function readCapabilityCatalog() {
  return readJsonArray(CAPABILITY_CATALOG_FILE, 'capability catalog')
}
// Collects the capability field names that expose a non-empty "<field>Options"
// array under capabilities[modelType] (e.g. "sizeOptions" -> "size").
// Returns an empty Set for missing/malformed capability shapes.
function extractCapabilityOptionFields(modelType, capabilities) {
  const fields = new Set()
  const isPlainObject = (value) => !!value && typeof value === 'object' && !Array.isArray(value)
  if (!isPlainObject(capabilities)) return fields
  const namespace = capabilities[modelType]
  if (!isPlainObject(namespace)) return fields
  const SUFFIX = 'Options'
  for (const [key, value] of Object.entries(namespace)) {
    if (!key.endsWith(SUFFIX)) continue
    if (!Array.isArray(value) || value.length === 0) continue
    fields.add(key.slice(0, key.length - SUFFIX.length))
  }
  return fields
}

// Indexes capability entries by model key. Each entry is stored under its
// full provider id and, as a fallback, under the provider key before ":".
// First write wins for the fallback key, so fully-qualified entries are not
// clobbered by later ones sharing the same provider key.
function buildCapabilityOptionFieldMap(capabilityEntries) {
  const map = new Map()
  const trimmed = (value) => (typeof value === 'string' ? value.trim() : '')
  for (const entry of capabilityEntries) {
    if (!entry || typeof entry !== 'object' || Array.isArray(entry)) continue
    const modelType = trimmed(entry.modelType)
    const provider = trimmed(entry.provider)
    const modelId = trimmed(entry.modelId)
    if (!(modelType && provider && modelId)) continue
    const fields = extractCapabilityOptionFields(modelType, entry.capabilities)
    map.set(`${modelType}::${provider}::${modelId}`, fields)
    const marker = provider.indexOf(':')
    const providerKey = marker === -1 ? provider : provider.slice(0, marker)
    const fallbackKey = `${modelType}::${providerKey}::${modelId}`
    if (!map.has(fallbackKey)) {
      map.set(fallbackKey, fields)
    }
  }
  return map
}
// Validates a single capability pricing tier, expected shape:
//   { when: { field: string|number|boolean, ... }, amount: number >= 0 }
// Problems are appended to `issues`; nothing is returned.
function validateTier(issues, file, index, tier, tierIndex) {
  const report = (field, message) => issues.push({ file, index, field, message })
  const isPlainObject = (value) => !!value && typeof value === 'object' && !Array.isArray(value)
  if (!isPlainObject(tier)) {
    report(`pricing.tiers[${tierIndex}]`, 'tier must be object')
    return
  }
  if (isPlainObject(tier.when) && Object.keys(tier.when).length > 0) {
    for (const [field, value] of Object.entries(tier.when)) {
      const isScalar = typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean'
      if (!isScalar) {
        report(`pricing.tiers[${tierIndex}].when.${field}`, 'condition value must be string/number/boolean')
      }
    }
  } else {
    report(`pricing.tiers[${tierIndex}].when`, 'when must be non-empty object')
  }
  if (typeof tier.amount !== 'number' || !Number.isFinite(tier.amount) || tier.amount < 0) {
    report(`pricing.tiers[${tierIndex}].amount`, 'amount must be finite number >= 0')
  }
}
// Text-model pricing must define exactly one `when: { tokenType }` tier per
// token type and cover both "input" and "output", with no duplicates and no
// extra condition fields. Malformed tiers are reported by validateTier and
// skipped here.
function validateTextCapabilityTiers(issues, file, index, tiers) {
  const seenTokenTypes = new Set()
  tiers.forEach((tier, tierIndex) => {
    if (!isRecord(tier) || !isRecord(tier.when)) return
    const whenFields = Object.keys(tier.when)
    if (whenFields.length !== 1 || whenFields[0] !== 'tokenType') {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when`, 'text capability tier must only contain tokenType')
      return
    }
    const tokenType = tier.when.tokenType
    if (typeof tokenType !== 'string' || !TEXT_TOKEN_TYPES.has(tokenType)) {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when.tokenType`, 'tokenType must be input or output')
      return
    }
    if (seenTokenTypes.has(tokenType)) {
      pushIssue(issues, file, index, `pricing.tiers[${tierIndex}].when.tokenType`, `duplicate tokenType tier: ${tokenType}`)
      return
    }
    seenTokenTypes.add(tokenType)
  })
  // Every required token type must have exactly one tier.
  for (const requiredTokenType of TEXT_TOKEN_TYPES) {
    if (!seenTokenTypes.has(requiredTokenType)) {
      pushIssue(issues, file, index, 'pricing.tiers', `missing text tier tokenType=${requiredTokenType}`)
    }
  }
}
// Image/video tiers may only condition on fields the model's capability
// catalog declares via "<field>Options". Looks up the capability entry by the
// fully-qualified provider id first, then by the provider key before ":".
function validateMediaCapabilityTierFields(issues, file, index, item, tiers, capabilityOptionFieldsMap) {
  const modelType = item.apiType
  const provider = item.provider
  const modelId = item.modelId
  const exactKey = buildModelKey(modelType, provider, modelId)
  const fallbackKey = buildModelKey(modelType, getProviderKey(provider), modelId)
  const optionFields = capabilityOptionFieldsMap.get(exactKey) || capabilityOptionFieldsMap.get(fallbackKey)
  if (!optionFields || optionFields.size === 0) {
    pushIssue(issues, file, index, 'pricing.tiers', `no capability option fields found for ${modelType} ${provider}/${modelId}`)
    return
  }
  tiers.forEach((tier, tierIndex) => {
    if (!isRecord(tier) || !isRecord(tier.when)) return
    for (const field of Object.keys(tier.when)) {
      if (optionFields.has(field)) continue
      pushIssue(
        issues,
        file,
        index,
        `pricing.tiers[${tierIndex}].when.${field}`,
        `field ${field} is not declared in capabilities options for ${modelType} ${provider}/${modelId}`,
      )
    }
  })
}
// Flags tiers whose `when` conditions exactly duplicate an earlier tier.
// Conditions are compared order-insensitively via a sorted-entries signature.
function validateDuplicateCapabilityTiers(issues, file, index, tiers) {
  const seenSignatures = new Set()
  for (const [tierIndex, tier] of tiers.entries()) {
    if (!tier || typeof tier !== 'object' || Array.isArray(tier)) continue
    const when = tier.when
    if (!when || typeof when !== 'object' || Array.isArray(when)) continue
    const sortedEntries = Object.entries(when).sort((a, b) => a[0].localeCompare(b[0]))
    const signature = JSON.stringify(sortedEntries)
    if (seenSignatures.has(signature)) {
      issues.push({ file, index, field: `pricing.tiers[${tierIndex}].when`, message: 'duplicate capability tier condition' })
    } else {
      seenSignatures.add(signature)
    }
  }
}
// Dispatches pricing validation for one catalog entry by mode:
//  - 'flat': requires a finite non-negative flatAmount;
//  - 'capability': requires non-empty tiers, validates each tier's shape,
//    rejects duplicate conditions, then applies the apiType-specific rules
//    (token tiers for text, declared-option fields for image/video).
function validatePricing(issues, file, index, item, capabilityOptionFieldsMap) {
  const pricing = item.pricing
  if (!isRecord(pricing)) {
    pushIssue(issues, file, index, 'pricing', 'pricing must be object')
    return
  }
  const mode = pricing.mode
  if (!isNonEmptyString(mode) || !PRICING_MODES.has(mode)) {
    pushIssue(issues, file, index, 'pricing.mode', 'pricing.mode must be flat or capability')
    return
  }
  if (mode === 'flat') {
    if (!isFiniteNumber(pricing.flatAmount) || pricing.flatAmount < 0) {
      pushIssue(issues, file, index, 'pricing.flatAmount', 'flatAmount must be finite number >= 0')
    }
    return
  }
  // capability mode from here on
  const tiers = pricing.tiers
  if (!Array.isArray(tiers) || tiers.length === 0) {
    pushIssue(issues, file, index, 'pricing.tiers', 'tiers must be non-empty array')
    return
  }
  tiers.forEach((tier, tierIndex) => validateTier(issues, file, index, tier, tierIndex))
  validateDuplicateCapabilityTiers(issues, file, index, tiers)
  if (item.apiType === 'text') {
    validateTextCapabilityTiers(issues, file, index, tiers)
    return
  }
  if (item.apiType === 'image' || item.apiType === 'video') {
    validateMediaCapabilityTierFields(issues, file, index, item, tiers, capabilityOptionFieldsMap)
  }
}
// Entry point: loads every pricing catalog plus the capability catalog,
// validates each entry, and prints up to `maxPrint` issues. Uses
// process.exitCode (not process.exit) so stdout can flush before exiting.
async function main() {
  const issues = []
  const files = await listCatalogFiles()
  const capabilityCatalog = await readCapabilityCatalog()
  const capabilityOptionFieldsMap = buildCapabilityOptionFieldMap(capabilityCatalog)
  // An empty pricing directory is treated as a hard failure, not a pass.
  if (files.length === 0) {
    throw new Error(`no pricing files found in ${CATALOG_DIR}`)
  }
  for (const filePath of files) {
    const items = await readCatalog(filePath)
    for (let index = 0; index < items.length; index += 1) {
      const item = items[index]
      // Top-level shape checks; pricing-field checks happen in validatePricing.
      if (!isRecord(item)) {
        pushIssue(issues, filePath, index, 'entry', 'entry must be object')
        continue
      }
      if (!isNonEmptyString(item.apiType) || !API_TYPES.has(item.apiType)) {
        pushIssue(issues, filePath, index, 'apiType', 'apiType must be one of text/image/video/voice/voice-design/lip-sync')
      }
      if (!isNonEmptyString(item.provider)) {
        pushIssue(issues, filePath, index, 'provider', 'provider must be non-empty string')
      }
      if (!isNonEmptyString(item.modelId)) {
        pushIssue(issues, filePath, index, 'modelId', 'modelId must be non-empty string')
      }
      validatePricing(issues, filePath, index, item, capabilityOptionFieldsMap)
    }
  }
  if (issues.length === 0) {
    process.stdout.write(`[check-pricing-catalog] OK (${files.length} files)\n`)
    return
  }
  // Cap the report so a badly broken catalog does not flood CI logs.
  const maxPrint = 50
  for (const issue of issues.slice(0, maxPrint)) {
    process.stdout.write(`[check-pricing-catalog] ${issue.file}#${issue.index} ${issue.field}: ${issue.message}\n`)
  }
  if (issues.length > maxPrint) {
    process.stdout.write(`[check-pricing-catalog] ... ${issues.length - maxPrint} more issues\n`)
  }
  process.exitCode = 1
}
main().catch((error) => {
  process.stderr.write(`[check-pricing-catalog] failed: ${String(error)}\n`)
  process.exitCode = 1
})

View File

@@ -0,0 +1,197 @@
import { prisma } from '@/lib/prisma'
// Minimal projection of a character row needed for the voice cleanup.
type CharacterVoiceRecord = {
  id: string
  customVoiceUrl: string | null
}
// Shape of one speaker entry inside an episode's speakerVoices JSON payload.
// Fields are `unknown` because the payload comes from a free-form JSON column;
// extra keys are preserved as-is via the index signature.
type SpeakerVoiceConfig = {
  voiceType?: unknown
  voiceId?: unknown
  audioUrl?: unknown
  [key: string]: unknown
}
// Counters reported as JSON at the end of the cleanup run.
type CleanupSummary = {
  projectCharactersUpdated: number
  globalCharactersUpdated: number
  episodeSpeakerVoicesUpdated: number
  episodeSpeakerVoicesCleared: number
  invalidSpeakerVoicesSkipped: number
}
/** True when the value is a non-blank string (treated as a playable audio URL). */
function hasPlayableAudioUrl(value: unknown) {
  if (typeof value !== 'string') return false
  return value.trim() !== ''
}
/** A character keeps voiceType 'custom' only while uploaded audio exists; otherwise null. */
function normalizeVoiceType(customVoiceUrl: string | null) {
  if (hasPlayableAudioUrl(customVoiceUrl)) return 'custom'
  return null
}
/**
 * Rewrites azure-voiced character rows in either the project-scoped or global
 * character table: voiceId is always cleared and voiceType becomes 'custom'
 * when uploaded audio exists, otherwise null. Updates run sequentially.
 * Returns the number of rows updated.
 */
async function cleanupCharacterTable(records: CharacterVoiceRecord[], table: 'project' | 'global') {
  let updated = 0
  for (const record of records) {
    const data = {
      voiceType: normalizeVoiceType(record.customVoiceUrl),
      voiceId: null,
    }
    if (table === 'project') {
      await prisma.novelPromotionCharacter.update({ where: { id: record.id }, data })
    } else {
      await prisma.globalCharacter.update({ where: { id: record.id }, data })
    }
    updated += 1
  }
  return updated
}
/**
 * Migrates one episode's speakerVoices JSON away from the removed 'azure'
 * voice type:
 *  - azure entries with a playable audioUrl become voiceType 'custom' with voiceId cleared;
 *  - azure entries without usable audio are dropped entirely;
 *  - every other entry passes through untouched.
 * Returns { ok: false } for malformed payloads (non-object JSON, or a speaker
 * value that is not an object). On success reports whether anything changed,
 * whether the map ended up empty (cleared), and the serialized result — the
 * original string when unchanged, null when cleared.
 */
function normalizeSpeakerVoices(payload: string): {
  ok: true
  changed: boolean
  cleared: boolean
  next: string | null
} | {
  ok: false
} {
  let parsed: unknown
  try {
    parsed = JSON.parse(payload)
  } catch {
    return { ok: false }
  }
  if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) {
    return { ok: false }
  }
  const next: Record<string, Record<string, unknown>> = {}
  let changed = false
  for (const [speaker, rawConfig] of Object.entries(parsed as Record<string, unknown>)) {
    if (rawConfig === null || typeof rawConfig !== 'object' || Array.isArray(rawConfig)) {
      return { ok: false }
    }
    const config: Record<string, unknown> = { ...(rawConfig as Record<string, unknown>) }
    if (config.voiceType !== 'azure') {
      next[speaker] = config
      continue
    }
    changed = true
    const audioUrl = config.audioUrl
    const playable = typeof audioUrl === 'string' && audioUrl.trim().length > 0
    // Stale azure entry with no usable audio: drop it entirely.
    if (!playable) continue
    config.voiceType = 'custom'
    config.voiceId = null
    next[speaker] = config
  }
  if (Object.keys(next).length === 0) {
    return { ok: true, changed, cleared: true, next: null }
  }
  return { ok: true, changed, cleared: false, next: changed ? JSON.stringify(next) : payload }
}
// Entry point: clears the removed 'azure' voice type from both character
// tables and from every episode's speakerVoices payload, then prints a JSON
// summary of what was touched.
async function main() {
  const summary: CleanupSummary = {
    projectCharactersUpdated: 0,
    globalCharactersUpdated: 0,
    episodeSpeakerVoicesUpdated: 0,
    episodeSpeakerVoicesCleared: 0,
    invalidSpeakerVoicesSkipped: 0,
  }
  // Only rows still marked 'azure' need cleanup; fetch both tables in parallel.
  const [projectCharacters, globalCharacters] = await Promise.all([
    prisma.novelPromotionCharacter.findMany({
      where: { voiceType: 'azure' },
      select: {
        id: true,
        customVoiceUrl: true,
      },
    }),
    prisma.globalCharacter.findMany({
      where: { voiceType: 'azure' },
      select: {
        id: true,
        customVoiceUrl: true,
      },
    }),
  ])
  summary.projectCharactersUpdated = await cleanupCharacterTable(projectCharacters, 'project')
  summary.globalCharactersUpdated = await cleanupCharacterTable(globalCharacters, 'global')
  const episodes = await prisma.novelPromotionEpisode.findMany({
    where: {
      speakerVoices: { not: null },
    },
    select: {
      id: true,
      speakerVoices: true,
    },
  })
  for (const row of episodes) {
    const speakerVoices = row.speakerVoices
    // Cheap substring pre-filter before JSON parsing.
    // NOTE(review): this assumes compact serialization ('"voiceType":"azure"'
    // with no whitespace, as produced by default JSON.stringify); prettified
    // payloads would be silently skipped — confirm all writers use defaults.
    if (!speakerVoices || !speakerVoices.includes('"voiceType":"azure"')) {
      continue
    }
    const normalized = normalizeSpeakerVoices(speakerVoices)
    if (!normalized.ok) {
      // Malformed payloads are counted and left untouched rather than dropped.
      summary.invalidSpeakerVoicesSkipped += 1
      continue
    }
    if (!normalized.changed) {
      continue
    }
    await prisma.novelPromotionEpisode.update({
      where: { id: row.id },
      data: {
        speakerVoices: normalized.next,
      },
    })
    summary.episodeSpeakerVoicesUpdated += 1
    if (normalized.cleared) {
      summary.episodeSpeakerVoicesCleared += 1
    }
  }
  process.stdout.write(`${JSON.stringify({
    ok: true,
    checkedAt: new Date().toISOString(),
    summary,
  }, null, 2)}\n`)
}
main()
  .catch((error) => {
    process.stderr.write(`${error instanceof Error ? error.stack || error.message : String(error)}\n`)
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the Prisma connection pool, success or failure.
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
const ROOT = process.cwd()
// Line-count budgets. Each rule scans `dir`, keeps the files accepted by
// `include` (paths are ROOT-relative with POSIX separators), and fails any
// file whose line count exceeds `limit`.
const RULES = [
  {
    label: 'component',
    dir: 'src',
    // Any ts/tsx file under a components/ directory.
    include: (relPath) =>
      relPath.includes('/components/')
      && /\.(ts|tsx)$/.test(relPath),
    limit: 500,
  },
  {
    label: 'hook',
    dir: 'src',
    // Files in a hooks/ directory, or named like a hook (useXxx.ts[x]).
    include: (relPath) =>
      (relPath.includes('/hooks/') || /\/use[A-Z].+\.(ts|tsx)$/.test(relPath))
      && /\.(ts|tsx)$/.test(relPath),
    limit: 400,
  },
  {
    label: 'worker-handler',
    dir: 'src/lib/workers/handlers',
    include: (relPath) => /\.(ts|tsx)$/.test(relPath),
    limit: 300,
  },
  {
    label: 'mutation',
    dir: 'src/lib/query/mutations',
    // Barrel files are exempt from the mutation budget.
    include: (relPath) => /\.(ts|tsx)$/.test(relPath) && !relPath.endsWith('/index.ts'),
    limit: 300,
  },
]
// Recursively lists every file under absDir. Each result carries the absolute
// path plus a relBase-prefixed relative path normalized to POSIX separators.
// Missing directories yield an empty list.
const walkFiles = (absDir, relBase = '') => {
  if (!fs.existsSync(absDir)) return []
  const collected = []
  for (const entry of fs.readdirSync(absDir, { withFileTypes: true })) {
    const absPath = path.join(absDir, entry.name)
    const relPath = path.join(relBase, entry.name).replace(/\\/g, '/')
    if (entry.isDirectory()) {
      collected.push(...walkFiles(absPath, relPath))
    } else {
      collected.push({ absPath, relPath })
    }
  }
  return collected
}
// Counts the lines in a file. A trailing newline terminates the final line
// rather than starting a new one, so "a\n" is 1 line and "a\nb" is 2.
// (split('\n').length alone over-counted newline-terminated files by one,
// which made the budget guard stricter than the advertised limits.)
const countLines = (absPath) => {
  const raw = fs.readFileSync(absPath, 'utf8')
  if (raw.length === 0) return 0
  const segments = raw.split('\n').length
  return raw.endsWith('\n') ? segments - 1 : segments
}
// Scan each rule's directory and record every file over its line budget.
const violations = []
for (const rule of RULES) {
  const absDir = path.join(ROOT, rule.dir)
  const files = walkFiles(absDir, rule.dir).filter((f) => rule.include(f.relPath))
  for (const file of files) {
    const lineCount = countLines(file.absPath)
    if (lineCount > rule.limit) {
      violations.push({
        label: rule.label,
        relPath: file.relPath,
        lineCount,
        limit: rule.limit,
      })
    }
  }
}
// Report: PASS (exit 0) when clean, otherwise list every offender and exit 1.
if (violations.length === 0) {
  process.stdout.write('[file-line-count-guard] PASS\n')
  process.exit(0)
}
process.stderr.write('[file-line-count-guard] FAIL: file size budget exceeded\n')
for (const violation of violations) {
  process.stderr.write(
    `- [${violation.label}] ${violation.relPath}: ${violation.lineCount} > ${violation.limit}\n`,
  )
}
process.exit(1)

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env node
// Guard: production API routes must never call the LLM layer directly.
// All LLM work has to go through the async task pipeline, so this scan fails
// the build when any API route file imports '@/lib/llm-client', invokes a
// chatCompletion* helper, or carries the dual-track isInternalTaskExecution marker.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
// Both app-router and pages-router API trees are scanned.
const scanRoots = ['src/app/api', 'src/pages/api']
// Path prefixes exempt from the rule (intentionally empty: no exceptions).
const allowedPrefixes = []
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
// Prints the failure header plus per-violation bullets, then exits non-zero.
function fail(title, details = []) {
  console.error(`\n[no-api-direct-llm-call] ${title}`)
  for (const line of details) {
    console.error(` - ${line}`)
  }
  process.exit(1)
}
// Converts an absolute path to a root-relative, POSIX-separated path.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collects source files, skipping VCS/build/dependency dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    const ext = path.extname(entry.name)
    if (sourceExtensions.has(ext)) {
      out.push(fullPath)
    }
  }
  return out
}
// True when the file lives under an exempt prefix.
function isAllowedFile(relPath) {
  return allowedPrefixes.some((prefix) => relPath.startsWith(prefix))
}
// Returns "path:line message" strings for every forbidden pattern in one file.
function collectViolations(fullPath) {
  const relPath = toRel(fullPath)
  if (isAllowedFile(relPath)) return []
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  const violations = []
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i]
    // Direct import of the low-level LLM client.
    if (/from\s+['"]@\/lib\/llm-client['"]/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden import from '@/lib/llm-client'`)
    }
    // Any direct chatCompletion/chatCompletionXxx(...) call.
    if (/\bchatCompletion[A-Za-z0-9_]*\s*\(/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden direct chatCompletion* call`)
    }
    // Legacy sync/async dual-track execution marker.
    if (/\bisInternalTaskExecution\b/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden dual-track fallback marker isInternalTaskExecution`)
    }
  }
  return violations
}
const allFiles = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = allFiles.flatMap((fullPath) => collectViolations(fullPath))
if (violations.length > 0) {
  fail('Found forbidden direct LLM execution in production API routes', violations)
}
console.log('[no-api-direct-llm-call] OK')

View File

@@ -0,0 +1,45 @@
#!/usr/bin/env node
// Guard: known duplicate API endpoint implementations must not coexist.
// Each group lists alternative route files for the same logical endpoint;
// finding more than one of them on disk fails the check.
import fs from 'fs'
import path from 'path'
const ROOT = process.cwd()
const API_ROOT = path.join(ROOT, 'src', 'app', 'api')
const KNOWN_DUPLICATE_GROUPS = [
  {
    key: 'user-llm-test-connection',
    candidates: [
      'src/app/api/user/api-config/test-connection/route.ts',
      'src/app/api/user/test-llm-provider/route.ts',
    ],
  },
]
const exists = (relPath) => fs.existsSync(path.join(ROOT, relPath))
// A group fails only when two or more of its candidate files are present.
const failures = KNOWN_DUPLICATE_GROUPS
  .map((group) => ({ key: group.key, present: group.candidates.filter(exists) }))
  .filter((group) => group.present.length > 1)
if (!fs.existsSync(API_ROOT)) {
  process.stdout.write('[no-duplicate-endpoint-entry] PASS (api dir missing)\n')
  process.exit(0)
}
if (failures.length === 0) {
  process.stdout.write('[no-duplicate-endpoint-entry] PASS\n')
  process.exit(0)
}
process.stderr.write('[no-duplicate-endpoint-entry] FAIL: duplicated endpoint entry detected\n')
for (const failure of failures) {
  process.stderr.write(`- ${failure.key}\n`)
  failure.present.forEach((relPath) => {
    process.stderr.write(` - ${relPath}\n`)
  })
}
process.exit(1)

View File

@@ -0,0 +1,73 @@
#!/usr/bin/env node
// Guard: model capability lists must come from the capability catalog, not
// from hardcoded constants scattered across the codebase. Scans src/ for the
// forbidden constant names and fails the build when any file other than the
// allowed definition site references them.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src']
// The only file still allowed to define/reference these constants.
const allowConstantDefinitionsIn = new Set([
  'src/lib/constants.ts',
])
const forbiddenCapabilityConstants = [
  'VIDEO_MODELS',
  'FIRST_LAST_FRAME_MODELS',
  'AUDIO_SUPPORTED_MODELS',
  'BANANA_MODELS',
  'BANANA_RESOLUTION_OPTIONS',
]
// Compile each word-boundary pattern once up front; previously a fresh RegExp
// was allocated for every token on every scanned line (tokens x lines x files).
const forbiddenTokenPatterns = forbiddenCapabilityConstants.map((token) => ({
  token,
  pattern: new RegExp(`\\b${token}\\b`),
}))
// Prints the failure header plus per-violation bullets, then exits non-zero.
function fail(title, details = []) {
  console.error(`\n[no-hardcoded-model-capabilities] ${title}`)
  for (const line of details) {
    console.error(` - ${line}`)
  }
  process.exit(1)
}
// Converts an absolute path to a root-relative, POSIX-separated path.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collects source files, skipping VCS/build/dependency dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    if (sourceExtensions.has(path.extname(entry.name))) {
      out.push(fullPath)
    }
  }
  return out
}
const files = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = []
for (const fullPath of files) {
  const relPath = toRel(fullPath)
  if (allowConstantDefinitionsIn.has(relPath)) continue
  const lines = fs.readFileSync(fullPath, 'utf8').split('\n')
  for (let index = 0; index < lines.length; index += 1) {
    const line = lines[index]
    for (const { token, pattern } of forbiddenTokenPatterns) {
      if (pattern.test(line)) {
        violations.push(`${relPath}:${index + 1} forbidden hardcoded model capability token ${token}`)
      }
    }
  }
}
if (violations.length > 0) {
  fail('Found hardcoded model capability usage', violations)
}
console.log('[no-hardcoded-model-capabilities] OK')

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env node
// Guard: production API routes must not fall back to synchronous in-process
// task execution. Flags the dual-track marker isInternalTaskExecution, the
// sync-branch helper shouldRunSyncTask, and maybeSubmitLLMTask call sites
// missing an explicit "sync mode is disabled" guard comment/string.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const scanRoots = ['src/app/api', 'src/pages/api']
// ui-review routes are exempt (internal tooling, not production traffic).
const allowedPrefixes = ['src/app/api/ui-review/', 'src/pages/api/ui-review/']
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
// Prints the failure header plus per-violation bullets, then exits non-zero.
function fail(title, details = []) {
  console.error(`\n[no-internal-task-sync-fallback] ${title}`)
  for (const line of details) {
    console.error(` - ${line}`)
  }
  process.exit(1)
}
// Converts an absolute path to a root-relative, POSIX-separated path.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collects source files, skipping VCS/build/dependency dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    if (sourceExtensions.has(path.extname(entry.name))) {
      out.push(fullPath)
    }
  }
  return out
}
// True when the file lives under an exempt prefix.
function isAllowedFile(relPath) {
  return allowedPrefixes.some((prefix) => relPath.startsWith(prefix))
}
// Returns "path:line message" strings for every forbidden pattern in one file.
function collectViolations(fullPath) {
  const relPath = toRel(fullPath)
  if (isAllowedFile(relPath)) return []
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  const violations = []
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i]
    if (/\bisInternalTaskExecution\b/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden dual-track fallback marker isInternalTaskExecution`)
    }
    if (/\bshouldRunSyncTask\s*\(/.test(line)) {
      violations.push(`${relPath}:${i + 1} forbidden sync-mode branch helper shouldRunSyncTask`)
    }
  }
  // File-level rule: any maybeSubmitLLMTask caller must also carry the
  // literal text "sync mode is disabled for this route" somewhere in the file.
  if (/\bmaybeSubmitLLMTask\s*\(/.test(content) && !/sync mode is disabled for this route/.test(content)) {
    violations.push(`${relPath} missing explicit sync-disabled guard after maybeSubmitLLMTask`)
  }
  return violations
}
const allFiles = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = allFiles.flatMap((fullPath) => collectViolations(fullPath))
if (violations.length > 0) {
  fail('Found potential sync fallback or dual-track task branch in production API routes', violations)
}
console.log('[no-internal-task-sync-fallback] OK')

View File

@@ -0,0 +1,88 @@
#!/usr/bin/env node
// Guard: all image/video generation must flow through generator-api's
// resolveModelSelection. Fails when the generator factory or raw API-key
// helpers are used directly, which would bypass provider routing.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
// Only these files may import the generator factory directly.
const allowFactoryImportIn = new Set([
  'src/lib/generator-api.ts',
  'src/lib/generators/factory.ts',
])
// Prints the failure header plus per-violation bullets, then exits non-zero.
function fail(title, details = []) {
  console.error(`\n[no-media-provider-bypass] ${title}`)
  for (const line of details) {
    console.error(` - ${line}`)
  }
  process.exit(1)
}
// Converts an absolute path to a root-relative, POSIX-separated path.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collects source files, skipping VCS/build/dependency dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    if (sourceExtensions.has(path.extname(entry.name))) {
      out.push(fullPath)
    }
  }
  return out
}
// Contract anchor: generator-api must exist and route both image and video
// generation through resolveModelSelection (hence >= 2 call sites).
const generatorApiPath = path.join(root, 'src/lib/generator-api.ts')
if (!fs.existsSync(generatorApiPath)) {
  fail('Missing src/lib/generator-api.ts')
}
const generatorApiContent = fs.readFileSync(generatorApiPath, 'utf8')
const resolveModelSelectionHits = (generatorApiContent.match(/resolveModelSelection\s*\(/g) || []).length
if (resolveModelSelectionHits < 2) {
  fail('generator-api must route both image and video generation through resolveModelSelection', [
    'expected >= 2 resolveModelSelection(...) calls in src/lib/generator-api.ts',
  ])
}
const allFiles = walk(path.join(root, 'src'))
const violations = []
for (const fullPath of allFiles) {
  const relPath = toRel(fullPath)
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i]
    // Direct factory calls are only legal inside the factory module itself.
    if (
      relPath !== 'src/lib/generators/factory.ts' &&
      (/\bcreateImageGeneratorByModel\s*\(/.test(line) || /\bcreateVideoGeneratorByModel\s*\(/.test(line))
    ) {
      violations.push(`${relPath}:${i + 1} forbidden provider-bypass factory call create*GeneratorByModel(...)`)
    }
    // Raw API-key lookups are confined to api-config.
    if ((/\bgetImageApiKey\s*\(/.test(line) || /\bgetVideoApiKey\s*\(/.test(line)) && relPath !== 'src/lib/api-config.ts') {
      violations.push(`${relPath}:${i + 1} forbidden direct getImageApiKey/getVideoApiKey usage outside api-config`)
    }
    // Factory imports only from the allowlisted modules.
    if (/from\s+['"]@\/lib\/generators\/factory['"]/.test(line) && !allowFactoryImportIn.has(relPath)) {
      violations.push(`${relPath}:${i + 1} forbidden direct import from '@/lib/generators/factory' (must go through generator-api)`)
    }
  }
}
if (violations.length > 0) {
  fail('Found media provider routing bypass', violations)
}
console.log('[no-media-provider-bypass] OK')

View File

@@ -0,0 +1,93 @@
#!/usr/bin/env node
// Guard: persisted model selections must keep the full model_key
// ("provider::modelId"), never a bare modelId. Scans src/app and src/lib for
// downgrade patterns and asserts the contract files still contain their
// strict-parse/validation anchors.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src/app', 'src/lib']
// Settings fields that must always persist a model_key.
const modelFields = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
]
// Prints the failure header plus per-violation bullets, then exits non-zero.
function fail(title, details = []) {
  console.error(`\n[no-model-key-downgrade] ${title}`)
  for (const line of details) {
    console.error(` - ${line}`)
  }
  process.exit(1)
}
// Converts an absolute path to a root-relative, POSIX-separated path.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collects source files, skipping VCS/build/dependency dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    if (sourceExtensions.has(path.extname(entry.name))) {
      out.push(fullPath)
    }
  }
  return out
}
// Returns "path:line message" strings for downgrade patterns in one file.
function collectViolations(filePath) {
  const relPath = toRel(filePath)
  const lines = fs.readFileSync(filePath, 'utf8').split('\n')
  const violations = []
  // "<field>: ... modelId" on one line means a bare modelId is being persisted.
  const modelFieldPattern = new RegExp(`\\b(${modelFields.join('|')})\\s*:\\s*[^,\\n]*\\bmodelId\\b`)
  // Select options bound to model.modelId instead of the full modelKey.
  const optionModelIdPattern = /value=\{model\.modelId\}/
  for (let index = 0; index < lines.length; index += 1) {
    const line = lines[index]
    if (modelFieldPattern.test(line)) {
      violations.push(`${relPath}:${index + 1} default model field must persist model_key, not modelId`)
    }
    if (optionModelIdPattern.test(line)) {
      violations.push(`${relPath}:${index + 1} UI option value must use modelKey, not model.modelId`)
    }
  }
  return violations
}
// Fails unless the given file exists and contains every required snippet.
// Used to pin the model-key contract anchors against accidental removal.
function assertFileContains(relativePath, requiredSnippets) {
  const fullPath = path.join(root, relativePath)
  if (!fs.existsSync(fullPath)) {
    fail('Missing required contract file', [relativePath])
  }
  const content = fs.readFileSync(fullPath, 'utf8')
  const missing = requiredSnippets.filter((snippet) => !content.includes(snippet))
  if (missing.length > 0) {
    fail('Model key contract anchor missing', missing.map((snippet) => `${relativePath} missing: ${snippet}`))
  }
}
const files = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = files.flatMap((filePath) => collectViolations(filePath))
assertFileContains('src/lib/model-config-contract.ts', ['parseModelKeyStrict', 'markerIndex === -1) return null'])
assertFileContains('src/lib/config-service.ts', ['parseModelKeyStrict'])
assertFileContains('src/app/api/user/api-config/route.ts', ['validateDefaultModelKey', 'must be provider::modelId'])
assertFileContains('src/app/api/novel-promotion/[projectId]/route.ts', ['must be provider::modelId'])
if (violations.length > 0) {
  fail('Found model key downgrade pattern', violations)
}
console.log('[no-model-key-downgrade] OK')

View File

@@ -0,0 +1,109 @@
#!/usr/bin/env node
// Guard: the query cache (and its key factory) is the single source of truth.
// Flags local shadow-state hooks, hand-written queryKey arrays, and server
// code that writes the project `stage` column directly.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
// Roots checked with the per-line rules below.
const lineScanRoots = [
  'src/app/[locale]/workspace/[projectId]/modes/novel-promotion',
  'src/lib/query/hooks',
]
// Roots checked with the whole-file stage-write rule.
const fileScanRoots = [
  'src/app/api/novel-promotion',
  'src/lib/workers/handlers',
]
const lineRules = [
  {
    name: 'shadow state localStoryboards',
    test: (line) => /const\s*\[\s*localStoryboards\s*,\s*setLocalStoryboards\s*\]\s*=\s*useState/.test(line),
  },
  {
    name: 'shadow state localVoiceLines',
    test: (line) => /const\s*\[\s*localVoiceLines\s*,\s*setLocalVoiceLines\s*\]\s*=\s*useState/.test(line),
  },
  {
    // Any inline `queryKey: [...]` literal; keys must come from the key factory.
    name: 'hardcoded queryKey array',
    test: (line) => /queryKey\s*:\s*\[/.test(line),
  },
]
// Prints the failure header plus per-violation bullets, then exits non-zero.
function fail(title, details = []) {
  console.error(`\n[no-multiple-sources-of-truth] ${title}`)
  for (const detail of details) {
    console.error(` - ${detail}`)
  }
  process.exit(1)
}
// Converts an absolute path to a root-relative, POSIX-separated path.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collects source files, skipping VCS/build/dependency dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    if (sourceExtensions.has(path.extname(entry.name))) out.push(fullPath)
  }
  return out
}
// Applies every line rule to one file; returns "path:line forbidden: rule" strings.
function collectLineViolations(fullPath) {
  const relPath = toRel(fullPath)
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  const violations = []
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i]
    for (const rule of lineRules) {
      if (rule.test(line)) {
        violations.push(`${relPath}:${i + 1} forbidden: ${rule.name}`)
      }
    }
  }
  return violations
}
// Finds novelPromotionProject.update(...) blocks whose `data` writes `stage`.
// The non-greedy multi-line regex captures each call up to its closing "})".
function collectFileViolations(fullPath) {
  const relPath = toRel(fullPath)
  const content = fs.readFileSync(fullPath, 'utf8')
  const violations = []
  const updateCallRegex = /novelPromotionProject\.update\(\{[\s\S]*?\n\s*\}\)/g
  for (const match of content.matchAll(updateCallRegex)) {
    const block = match[0]
    const hasStageWrite = /\bdata\s*:\s*\{[\s\S]*?\bstage\s*:/.test(block)
    if (!hasStageWrite) continue
    // Report the 1-based line where the matched call starts.
    const before = content.slice(0, match.index ?? 0)
    const lineNumber = before.split('\n').length
    violations.push(`${relPath}:${lineNumber} forbidden: DB stage write in novelPromotionProject.update`)
  }
  return violations
}
const lineFiles = lineScanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const fileFiles = fileScanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const lineViolations = lineFiles.flatMap((fullPath) => collectLineViolations(fullPath))
const fileViolations = fileFiles.flatMap((fullPath) => collectFileViolations(fullPath))
const allViolations = [...lineViolations, ...fileViolations]
if (allViolations.length > 0) {
  fail('Found multiple-sources-of-truth regressions', allViolations)
}
console.log('[no-multiple-sources-of-truth] OK')

View File

@@ -0,0 +1,95 @@
#!/usr/bin/env node
// Guard: forbids "provider guessing" — fuzzy/cross-type provider resolution
// and any runtime model-registry mapping outside an explicit allowlist.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
// File extensions that count as scannable source files.
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src/lib', 'src/app/api']
// Intentionally empty: no file is currently allowed to use the model registry.
const allowModelRegistryUsage = new Set()
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[no-provider-guessing] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Workspace-relative, forward-slash path for stable reporting.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collect source files under `dir`, skipping VCS/build dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    if (sourceExtensions.has(path.extname(entry.name))) {
      out.push(fullPath)
    }
  }
  return out
}
// api-config.ts must exist (it is the single provider-resolution boundary).
const apiConfigPath = path.join(root, 'src/lib/api-config.ts')
if (!fs.existsSync(apiConfigPath)) {
  fail('Missing src/lib/api-config.ts')
}
// The legacy runtime registry must be fully removed, not just unused.
const legacyRegistryPath = path.join(root, 'src/lib/model-registry.ts')
if (fs.existsSync(legacyRegistryPath)) {
  fail('Legacy runtime registry must be removed', ['src/lib/model-registry.ts'])
}
const apiConfigText = fs.readFileSync(apiConfigPath, 'utf8')
// Tokens that indicate fuzzy / cross-media-type provider matching.
const forbiddenApiConfigTokens = [
  'includeAnyType',
  'crossTypeCandidates',
  'matches multiple providers across media types',
]
const apiViolations = forbiddenApiConfigTokens
  .filter((token) => apiConfigText.includes(token))
  .map((token) => `src/lib/api-config.ts contains forbidden provider-guessing token: ${token}`)
// Verify api-config.ts resolves providers via strict provider.id exact match
// (no filtering by type, no fuzzy providerKey matching).
if (!apiConfigText.includes('pickProviderStrict(')) {
  apiViolations.push('src/lib/api-config.ts missing strict provider resolution function (pickProviderStrict)')
}
const files = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = [...apiViolations]
for (const fullPath of files) {
  const relPath = toRel(fullPath)
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  for (let index = 0; index < lines.length; index += 1) {
    const line = lines[index]
    // Importing the legacy model-registry module is forbidden outside the
    // (currently empty) allowlist.
    if (
      /from\s+['"]@\/lib\/model-registry['"]/.test(line)
      && !allowModelRegistryUsage.has(relPath)
    ) {
      violations.push(`${relPath}:${index + 1} forbidden model-registry import outside allowed boundary`)
    }
    // Calling the registry's runtime lookup helpers is likewise forbidden.
    if (
      (/\bgetModelRegistryEntry\s*\(/.test(line) || /\blistRegisteredModels\s*\(/.test(line))
      && !allowModelRegistryUsage.has(relPath)
    ) {
      violations.push(`${relPath}:${index + 1} forbidden model-registry runtime mapping usage`)
    }
  }
}
if (violations.length > 0) {
  fail('Found provider guessing / registry mapping violation', violations)
}
console.log('[no-provider-guessing] OK')

View File

@@ -0,0 +1,81 @@
#!/usr/bin/env node
// Guard: forbids mirroring server data into local React state inside the
// novel-promotion workspace (server data must stay in the query layer).
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const scanRoots = [
  'src/app/[locale]/workspace/[projectId]/modes/novel-promotion',
]
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
// Line-level patterns that indicate a local mirror of server state.
const forbiddenRules = [
  {
    name: 'localProject/localEpisode mirror state',
    test: (line) => /\blocalProject\b|\blocalEpisode\b/.test(line),
  },
  {
    name: 'server mirror useState(projectData.*)',
    test: (line) => /useState\s*\(\s*projectData\./.test(line),
  },
  {
    name: 'server mirror useState(episode?.*)',
    test: (line) => /useState\s*\(\s*episode\?\./.test(line),
  },
]
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[no-server-mirror-state] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Workspace-relative, forward-slash path for stable reporting.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collect source files under `dir`, skipping VCS/build dirs.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    const ext = path.extname(entry.name)
    if (sourceExtensions.has(ext)) out.push(fullPath)
  }
  return out
}
// Apply every forbidden-pattern rule to each line of the file; return
// "path:line forbidden: <rule name>" strings.
function collectViolations(fullPath) {
  const relPath = toRel(fullPath)
  const content = fs.readFileSync(fullPath, 'utf8')
  const lines = content.split('\n')
  const violations = []
  for (let i = 0; i < lines.length; i += 1) {
    const line = lines[i]
    for (const rule of forbiddenRules) {
      if (rule.test(line)) {
        violations.push(`${relPath}:${i + 1} forbidden: ${rule.name}`)
      }
    }
  }
  return violations
}
// Main: scan all configured roots and fail if any mirror-state pattern hits.
const allFiles = scanRoots.flatMap((scanRoot) => walk(path.join(root, scanRoot)))
const violations = allFiles.flatMap((fullPath) => collectViolations(fullPath))
if (violations.length > 0) {
  fail('Found forbidden server mirror state patterns', violations)
}
console.log('[no-server-mirror-state] OK')

View File

@@ -0,0 +1,143 @@
#!/usr/bin/env node
// Guard: A/B regression check that zh/en prompt templates declare, use, and
// fully resolve the same set of placeholders.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const catalogPath = path.join(root, 'src', 'lib', 'prompt-i18n', 'catalog.ts')
// {key} single-brace placeholder.
const singlePlaceholderPattern = /\{([A-Za-z0-9_]+)\}/g
// {{key}} double-brace placeholder.
const doublePlaceholderPattern = /\{\{([A-Za-z0-9_]+)\}\}/g
// Any brace token left over after rendering (catches both forms, including
// lopsided ones like `{{key}`), i.e. an unresolved placeholder.
const unresolvedPlaceholderPattern = /\{\{?[A-Za-z0-9_]+\}?\}/g
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[prompt-ab-regression] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Extract { pathStem, variableKeys } records from the catalog source text.
// Matching is purely textual: each entry is a `pathStem: '...'` followed
// (lazily) by its `variableKeys: [...]` array of quoted strings.
function parseCatalog(text) {
  const entryPattern = /pathStem:\s*'([^']+)'\s*,[\s\S]*?variableKeys:\s*\[([\s\S]*?)\]\s*,/g
  const keyPattern = /'([^']+)'/g
  const parsed = []
  let entryHit
  while ((entryHit = entryPattern.exec(text)) !== null) {
    const variableKeys = []
    const rawKeys = entryHit[2] || ''
    keyPattern.lastIndex = 0
    let keyHit
    while ((keyHit = keyPattern.exec(rawKeys)) !== null) {
      variableKeys.push(keyHit[1])
    }
    parsed.push({ pathStem: entryHit[1], variableKeys })
  }
  return parsed
}
// Collect the unique placeholder names referenced in a template, covering
// both {key} and {{key}} forms. Order is first-seen: single-brace hits first.
function extractPlaceholders(template) {
  const found = new Set()
  for (const pattern of [singlePlaceholderPattern, doublePlaceholderPattern]) {
    for (const hit of template.matchAll(pattern)) {
      if (hit[1]) found.add(hit[1])
    }
  }
  return [...found]
}
// Render a template by substituting every {key} / {{key}} occurrence with the
// matching value from `variables`.
//
// Fixes over the previous version:
// - `key` is regex-escaped before being embedded in the RegExp, so a catalog
//   key containing a metacharacter cannot corrupt the pattern or throw.
// - the replacement is supplied as a function, so values containing special
//   replacement patterns like `$&` are inserted literally.
function replaceAll(template, variables) {
  let rendered = template
  for (const [key, value] of Object.entries(variables)) {
    const safeKey = key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
    const pattern = new RegExp(`\\{\\{${safeKey}\\}\\}|\\{${safeKey}\\}`, 'g')
    rendered = rendered.replace(pattern, () => value)
  }
  return rendered
}
// Items of `left` that do not appear in `right`, preserving `left`'s order.
function setDiff(left, right) {
  const exclude = new Set(right)
  const kept = []
  for (const item of left) {
    if (!exclude.has(item)) kept.push(item)
  }
  return kept
}
// Main: for every catalog entry, verify the zh/en template pair exists, that
// declared variableKeys and actual placeholders match exactly in BOTH locales,
// and that rendering with sample values leaves no unresolved placeholders.
if (!fs.existsSync(catalogPath)) {
  fail('catalog.ts not found', ['src/lib/prompt-i18n/catalog.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const entries = parseCatalog(catalogText)
if (entries.length === 0) {
  fail('failed to parse prompt catalog entries')
}
const violations = []
for (const entry of entries) {
  const zhPath = path.join(root, 'lib', 'prompts', `${entry.pathStem}.zh.txt`)
  const enPath = path.join(root, 'lib', 'prompts', `${entry.pathStem}.en.txt`)
  // Both locale templates must exist before any content checks make sense.
  if (!fs.existsSync(zhPath)) {
    violations.push(`missing zh template: lib/prompts/${entry.pathStem}.zh.txt`)
    continue
  }
  if (!fs.existsSync(enPath)) {
    violations.push(`missing en template: lib/prompts/${entry.pathStem}.en.txt`)
    continue
  }
  const zhTemplate = fs.readFileSync(zhPath, 'utf8')
  const enTemplate = fs.readFileSync(enPath, 'utf8')
  const declared = entry.variableKeys
  const zhPlaceholders = extractPlaceholders(zhTemplate)
  const enPlaceholders = extractPlaceholders(enTemplate)
  // Declared-vs-actual diffs, per locale and cross-locale.
  const missingInZh = setDiff(declared, zhPlaceholders)
  const missingInEn = setDiff(declared, enPlaceholders)
  const extraInZh = setDiff(zhPlaceholders, declared)
  const extraInEn = setDiff(enPlaceholders, declared)
  const zhOnly = setDiff(zhPlaceholders, enPlaceholders)
  const enOnly = setDiff(enPlaceholders, zhPlaceholders)
  for (const key of missingInZh) {
    violations.push(`missing {${key}} in zh template: lib/prompts/${entry.pathStem}.zh.txt`)
  }
  for (const key of missingInEn) {
    violations.push(`missing {${key}} in en template: lib/prompts/${entry.pathStem}.en.txt`)
  }
  for (const key of extraInZh) {
    violations.push(`unexpected {${key}} in zh template: lib/prompts/${entry.pathStem}.zh.txt`)
  }
  for (const key of extraInEn) {
    violations.push(`unexpected {${key}} in en template: lib/prompts/${entry.pathStem}.en.txt`)
  }
  for (const key of zhOnly) {
    violations.push(`placeholder {${key}} exists only in zh template: ${entry.pathStem}`)
  }
  for (const key of enOnly) {
    violations.push(`placeholder {${key}} exists only in en template: ${entry.pathStem}`)
  }
  // Render round-trip with distinctive sample values: after substitution no
  // brace token may remain, and every sample value must actually appear.
  const variables = Object.fromEntries(
    declared.map((key) => [key, `__AB_SAMPLE_${key.toUpperCase()}__`]),
  )
  const renderedZh = replaceAll(zhTemplate, variables)
  const renderedEn = replaceAll(enTemplate, variables)
  const unresolvedZh = renderedZh.match(unresolvedPlaceholderPattern) || []
  const unresolvedEn = renderedEn.match(unresolvedPlaceholderPattern) || []
  if (unresolvedZh.length > 0) {
    violations.push(`unresolved placeholders in zh template: ${entry.pathStem} -> ${unresolvedZh.join(', ')}`)
  }
  if (unresolvedEn.length > 0) {
    violations.push(`unresolved placeholders in en template: ${entry.pathStem} -> ${unresolvedEn.join(', ')}`)
  }
  for (const [key, sample] of Object.entries(variables)) {
    if (!renderedZh.includes(sample)) {
      violations.push(`zh template variable not used after render: ${entry.pathStem}.{${key}}`)
    }
    if (!renderedEn.includes(sample)) {
      violations.push(`en template variable not used after render: ${entry.pathStem}.{${key}}`)
    }
  }
}
if (violations.length > 0) {
  fail('A/B regression check failed', violations)
}
console.log(`[prompt-ab-regression] OK (${entries.length} templates checked)`)

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env node
// Guard: enforces the prompt-i18n boundary — templates are read only through
// the template store, no hardcoded language directives, no legacy un-suffixed
// prompt files, and full zh/en coverage for every catalog entry.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const sourceExtensions = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'])
const scanRoots = ['src', 'scripts']
// Files permitted to read prompt template files directly from disk.
const allowedPromptTemplateReaders = new Set([
  'src/lib/prompt-i18n/template-store.ts',
  'scripts/guards/prompt-i18n-guard.mjs',
  'scripts/guards/prompt-semantic-regression.mjs',
  'scripts/guards/prompt-ab-regression.mjs',
  'scripts/guards/prompt-json-canary-guard.mjs',
])
// Files permitted to mention language directives (this guard itself).
const languageDirectiveAllowList = new Set([
  'scripts/guards/prompt-i18n-guard.mjs',
])
// Matches hardcoded "answer in Chinese" style directives in zh or en.
const languageDirectivePattern = /请用中文|中文输出|use Chinese|output in Chinese/i
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[prompt-i18n-guard] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Workspace-relative, forward-slash path for stable reporting.
function toRel(fullPath) {
  return path.relative(root, fullPath).split(path.sep).join('/')
}
// Recursively collect ALL files under `dir` (no extension filter here),
// skipping VCS/build dirs. Returns `out` untouched when `dir` is absent.
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
      continue
    }
    out.push(fullPath)
  }
  return out
}
// All source files under the scan roots, filtered by extension.
function listSourceFiles() {
  return scanRoots
    .flatMap((scanRoot) => walk(path.join(root, scanRoot)))
    .filter((fullPath) => sourceExtensions.has(path.extname(fullPath)))
}
// Flag files (outside the allowlist) that both call readFileSync and mention
// the prompt template directory — a heuristic for direct template reads.
function collectDirectPromptReadViolations() {
  const violations = []
  const files = listSourceFiles()
  for (const filePath of files) {
    const relPath = toRel(filePath)
    if (allowedPromptTemplateReaders.has(relPath)) continue
    const content = fs.readFileSync(filePath, 'utf8')
    const hasReadFileSync = /\breadFileSync\s*\(/.test(content)
    if (!hasReadFileSync) continue
    // Either the literal 'lib/prompts' path, or the two segments quoted
    // separately (e.g. path.join(root, 'lib', 'prompts', ...)).
    const hasPromptPathToken =
      content.includes('lib/prompts')
      || (
        /['"]lib['"]/.test(content)
        && /['"]prompts['"]/.test(content)
      )
    if (hasPromptPathToken) {
      violations.push(`${relPath} direct prompt file read is forbidden; use buildPrompt/getPromptTemplate`)
    }
  }
  return violations
}
// Flag hardcoded language directives in source files (outside the allowlist)
// and in English prompt templates (which must never require Chinese output).
function collectLanguageDirectiveViolations() {
  const violations = []
  for (const filePath of listSourceFiles()) {
    const relPath = toRel(filePath)
    if (languageDirectiveAllowList.has(relPath)) continue
    const lines = fs.readFileSync(filePath, 'utf8').split('\n')
    for (let index = 0; index < lines.length; index += 1) {
      const line = lines[index]
      if (languageDirectivePattern.test(line)) {
        violations.push(`${relPath}:${index + 1} hardcoded language directive is forbidden`)
      }
    }
  }
  const promptFiles = walk(path.join(root, 'lib', 'prompts'))
    .filter((fullPath) => fullPath.endsWith('.en.txt'))
  for (const filePath of promptFiles) {
    const relPath = toRel(filePath)
    const lines = fs.readFileSync(filePath, 'utf8').split('\n')
    for (let index = 0; index < lines.length; index += 1) {
      const line = lines[index]
      if (languageDirectivePattern.test(line)) {
        violations.push(`${relPath}:${index + 1} English template cannot require Chinese output`)
      }
    }
  }
  return violations
}
// List prompt .txt files that lack a .zh/.en locale suffix (legacy format).
function collectLegacyPromptFiles() {
  return walk(path.join(root, 'lib', 'prompts'))
    .map((fullPath) => toRel(fullPath))
    .filter((relPath) => relPath.endsWith('.txt') && !relPath.endsWith('.zh.txt') && !relPath.endsWith('.en.txt'))
}
// Every pathStem declared in catalog.ts must have both locale templates.
function verifyPromptCatalogCoverage() {
  const catalogPath = path.join(root, 'src', 'lib', 'prompt-i18n', 'catalog.ts')
  if (!fs.existsSync(catalogPath)) {
    fail('Missing prompt catalog file', ['src/lib/prompt-i18n/catalog.ts'])
  }
  const catalogText = fs.readFileSync(catalogPath, 'utf8')
  const stems = Array.from(catalogText.matchAll(/pathStem:\s*'([^']+)'/g)).map((match) => match[1])
  if (stems.length === 0) {
    fail('No prompt pathStem found in catalog.ts')
  }
  const missing = []
  for (const stem of stems) {
    const zhPath = path.join(root, 'lib', 'prompts', `${stem}.zh.txt`)
    const enPath = path.join(root, 'lib', 'prompts', `${stem}.en.txt`)
    if (!fs.existsSync(zhPath)) {
      missing.push(`missing zh template: lib/prompts/${stem}.zh.txt`)
    }
    if (!fs.existsSync(enPath)) {
      missing.push(`missing en template: lib/prompts/${stem}.en.txt`)
    }
  }
  if (missing.length > 0) {
    fail('Prompt template coverage check failed', missing)
  }
}
// Main: run the four checks in order; each check fails fast on violations.
const legacyPromptFiles = collectLegacyPromptFiles()
if (legacyPromptFiles.length > 0) {
  fail('Legacy prompt files found (.txt without locale suffix)', legacyPromptFiles)
}
verifyPromptCatalogCoverage()
const promptReadViolations = collectDirectPromptReadViolations()
if (promptReadViolations.length > 0) {
  fail('Found direct prompt template reads', promptReadViolations)
}
const languageViolations = collectLanguageDirectiveViolations()
if (languageViolations.length > 0) {
  fail('Found hardcoded language directives', languageViolations)
}
console.log('[prompt-i18n-guard] OK')

View File

@@ -0,0 +1,250 @@
#!/usr/bin/env node
// Guard: validates the JSON "canary" fixtures (known-good sample outputs for
// prompt-driven JSON responses) against their expected schemas, and checks
// that both locale templates still mention the required structural tokens.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
// Canary fixture locations, keyed by the response family they exercise.
const CANARY_FILES = {
  clips: 'standards/prompt-canary/story_to_script_clips.canary.json',
  screenplay: 'standards/prompt-canary/screenplay_conversion.canary.json',
  storyboardPanels: 'standards/prompt-canary/storyboard_panels.canary.json',
  voiceAnalysis: 'standards/prompt-canary/voice_analysis.canary.json',
}
// For each prompt pathStem, tokens that must appear verbatim in both the zh
// and en templates (field names the model is instructed to emit).
const TEMPLATE_TOKEN_REQUIREMENTS = {
  'novel-promotion/agent_clip': ['start', 'end', 'summary', 'location', 'characters'],
  'novel-promotion/screenplay_conversion': [
    'clip_id',
    'original_text',
    'scenes',
    'heading',
    'content',
    'type',
    'action',
    'dialogue',
    'voiceover',
  ],
  'novel-promotion/agent_storyboard_plan': [
    'panel_number',
    'description',
    'characters',
    'location',
    'scene_type',
    'source_text',
  ],
  'novel-promotion/agent_storyboard_detail': [
    'panel_number',
    'description',
    'characters',
    'location',
    'scene_type',
    'source_text',
    'shot_type',
    'camera_move',
    'video_prompt',
  ],
  'novel-promotion/agent_storyboard_insert': [
    'panel_number',
    'description',
    'characters',
    'location',
    'scene_type',
    'source_text',
    'shot_type',
    'camera_move',
    'video_prompt',
  ],
  'novel-promotion/voice_analysis': [
    'lineIndex',
    'speaker',
    'content',
    'emotionStrength',
    'matchedPanel',
    'storyboardId',
    'panelIndex',
  ],
}
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[prompt-json-canary-guard] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Plain-object check: non-nullish, typeof 'object', and not an array.
function isRecord(value) {
  if (value === null || value === undefined) return false
  return typeof value === 'object' && Array.isArray(value) === false
}
// True only for primitive strings (boxed String objects are excluded).
function isString(value) {
  const kind = typeof value
  return kind === 'string'
}
// Finite primitive number: rejects NaN, +/-Infinity, and non-number types.
function isNumber(value) {
  if (typeof value !== 'number') return false
  return Number.isFinite(value)
}
// Read and parse a JSON fixture relative to the workspace root.
// Exits the process (via fail) when the file is missing or not valid JSON,
// so callers can assume a parsed value on return.
function readJson(relativePath) {
  const fullPath = path.join(root, relativePath)
  if (!fs.existsSync(fullPath)) {
    fail('Missing canary fixture', [relativePath])
  }
  try {
    return JSON.parse(fs.readFileSync(fullPath, 'utf8'))
  } catch (error) {
    fail('Invalid canary fixture JSON', [`${relativePath}: ${error instanceof Error ? error.message : String(error)}`])
  }
}
// Validate the clips fixture shape. Each validator returns the first error
// message as a string, or null when the fixture is fully valid.
function validateClipCanary(value) {
  if (!Array.isArray(value) || value.length === 0) return 'clips fixture must be a non-empty array'
  for (let i = 0; i < value.length; i += 1) {
    const row = value[i]
    if (!isRecord(row)) return `clips[${i}] must be an object`
    // start/end are text anchors; require a minimum length so they can be
    // matched back against the source text.
    if (!isString(row.start) || row.start.length < 5) return `clips[${i}].start must be string length >= 5`
    if (!isString(row.end) || row.end.length < 5) return `clips[${i}].end must be string length >= 5`
    if (!isString(row.summary) || row.summary.length === 0) return `clips[${i}].summary must be non-empty string`
    if (!(row.location === null || isString(row.location))) return `clips[${i}].location must be string or null`
    if (!Array.isArray(row.characters) || !row.characters.every((item) => isString(item))) {
      return `clips[${i}].characters must be string array`
    }
  }
  return null
}
// Validate the screenplay fixture: top-level metadata, scenes, and each
// scene's heading plus its action/dialogue/voiceover content segments.
function validateScreenplayCanary(value) {
  if (!isRecord(value)) return 'screenplay fixture must be an object'
  if (!isString(value.clip_id) || !value.clip_id) return 'screenplay.clip_id must be non-empty string'
  if (!isString(value.original_text)) return 'screenplay.original_text must be string'
  if (!Array.isArray(value.scenes) || value.scenes.length === 0) return 'screenplay.scenes must be non-empty array'
  for (let i = 0; i < value.scenes.length; i += 1) {
    const scene = value.scenes[i]
    if (!isRecord(scene)) return `screenplay.scenes[${i}] must be object`
    if (!isNumber(scene.scene_number)) return `screenplay.scenes[${i}].scene_number must be number`
    if (!isRecord(scene.heading)) return `screenplay.scenes[${i}].heading must be object`
    if (!isString(scene.heading.int_ext)) return `screenplay.scenes[${i}].heading.int_ext must be string`
    if (!isString(scene.heading.location)) return `screenplay.scenes[${i}].heading.location must be string`
    if (!isString(scene.heading.time)) return `screenplay.scenes[${i}].heading.time must be string`
    if (!isString(scene.description)) return `screenplay.scenes[${i}].description must be string`
    if (!Array.isArray(scene.characters) || !scene.characters.every((item) => isString(item))) {
      return `screenplay.scenes[${i}].characters must be string array`
    }
    if (!Array.isArray(scene.content) || scene.content.length === 0) return `screenplay.scenes[${i}].content must be non-empty array`
    for (let j = 0; j < scene.content.length; j += 1) {
      const segment = scene.content[j]
      if (!isRecord(segment)) return `screenplay.scenes[${i}].content[${j}] must be object`
      if (!isString(segment.type)) return `screenplay.scenes[${i}].content[${j}].type must be string`
      // Per-type required/optional fields; any other type is rejected.
      if (segment.type === 'action') {
        if (!isString(segment.text)) return `screenplay action[${i}:${j}].text must be string`
      } else if (segment.type === 'dialogue') {
        if (!isString(segment.character)) return `screenplay dialogue[${i}:${j}].character must be string`
        if (!isString(segment.lines)) return `screenplay dialogue[${i}:${j}].lines must be string`
        if (segment.parenthetical !== undefined && !isString(segment.parenthetical)) {
          return `screenplay dialogue[${i}:${j}].parenthetical must be string when present`
        }
      } else if (segment.type === 'voiceover') {
        if (!isString(segment.text)) return `screenplay voiceover[${i}:${j}].text must be string`
        if (segment.character !== undefined && !isString(segment.character)) {
          return `screenplay voiceover[${i}:${j}].character must be string when present`
        }
      } else {
        return `screenplay.scenes[${i}].content[${j}].type must be action/dialogue/voiceover`
      }
    }
  }
  return null
}
// Validate the storyboard panels fixture: required string/number fields per
// panel plus a characters array of { name, appearance? } objects.
function validateStoryboardPanelsCanary(value) {
  if (!Array.isArray(value) || value.length === 0) return 'storyboard panels fixture must be non-empty array'
  for (let i = 0; i < value.length; i += 1) {
    const panel = value[i]
    if (!isRecord(panel)) return `storyboardPanels[${i}] must be object`
    if (!isNumber(panel.panel_number)) return `storyboardPanels[${i}].panel_number must be number`
    if (!isString(panel.description)) return `storyboardPanels[${i}].description must be string`
    if (!isString(panel.location)) return `storyboardPanels[${i}].location must be string`
    if (!isString(panel.scene_type)) return `storyboardPanels[${i}].scene_type must be string`
    if (!isString(panel.source_text)) return `storyboardPanels[${i}].source_text must be string`
    if (!isString(panel.shot_type)) return `storyboardPanels[${i}].shot_type must be string`
    if (!isString(panel.camera_move)) return `storyboardPanels[${i}].camera_move must be string`
    if (!isString(panel.video_prompt)) return `storyboardPanels[${i}].video_prompt must be string`
    if (panel.duration !== undefined && !isNumber(panel.duration)) return `storyboardPanels[${i}].duration must be number when present`
    if (!Array.isArray(panel.characters)) return `storyboardPanels[${i}].characters must be array`
    for (let j = 0; j < panel.characters.length; j += 1) {
      const character = panel.characters[j]
      if (!isRecord(character)) return `storyboardPanels[${i}].characters[${j}] must be object`
      if (!isString(character.name)) return `storyboardPanels[${i}].characters[${j}].name must be string`
      if (character.appearance !== undefined && !isString(character.appearance)) {
        return `storyboardPanels[${i}].characters[${j}].appearance must be string when present`
      }
    }
  }
  return null
}
// Validate the voice-analysis fixture: per-line speaker/content rows with an
// optional matchedPanel reference ({ storyboardId, panelIndex } or null).
function validateVoiceAnalysisCanary(value) {
  if (!Array.isArray(value) || value.length === 0) return 'voice analysis fixture must be non-empty array'
  for (let i = 0; i < value.length; i += 1) {
    const row = value[i]
    if (!isRecord(row)) return `voiceAnalysis[${i}] must be object`
    if (!isNumber(row.lineIndex)) return `voiceAnalysis[${i}].lineIndex must be number`
    if (!isString(row.speaker)) return `voiceAnalysis[${i}].speaker must be string`
    if (!isString(row.content)) return `voiceAnalysis[${i}].content must be string`
    if (!isNumber(row.emotionStrength)) return `voiceAnalysis[${i}].emotionStrength must be number`
    if (row.matchedPanel !== null) {
      if (!isRecord(row.matchedPanel)) return `voiceAnalysis[${i}].matchedPanel must be object or null`
      if (!isString(row.matchedPanel.storyboardId)) return `voiceAnalysis[${i}].matchedPanel.storyboardId must be string`
      if (!isNumber(row.matchedPanel.panelIndex)) return `voiceAnalysis[${i}].matchedPanel.panelIndex must be number`
    }
  }
  return null
}
// Check that both locale templates for `pathStem` exist and contain every
// required token verbatim; returns a list of violation strings.
function checkTemplateTokens(pathStem, requiredTokens) {
  const violations = []
  for (const locale of ['zh', 'en']) {
    const relPath = `lib/prompts/${pathStem}.${locale}.txt`
    const fullPath = path.join(root, relPath)
    if (!fs.existsSync(fullPath)) {
      violations.push(`missing template: ${relPath}`)
      continue
    }
    const content = fs.readFileSync(fullPath, 'utf8')
    for (const token of requiredTokens) {
      if (!content.includes(token)) {
        violations.push(`missing token ${token} in ${relPath}`)
      }
    }
  }
  return violations
}
// Main: validate every fixture, then every template's token requirements,
// and fail with the accumulated violation list if anything is off.
const violations = []
const clipsErr = validateClipCanary(readJson(CANARY_FILES.clips))
if (clipsErr) violations.push(clipsErr)
const screenplayErr = validateScreenplayCanary(readJson(CANARY_FILES.screenplay))
if (screenplayErr) violations.push(screenplayErr)
const panelsErr = validateStoryboardPanelsCanary(readJson(CANARY_FILES.storyboardPanels))
if (panelsErr) violations.push(panelsErr)
const voiceErr = validateVoiceAnalysisCanary(readJson(CANARY_FILES.voiceAnalysis))
if (voiceErr) violations.push(voiceErr)
for (const [pathStem, requiredTokens] of Object.entries(TEMPLATE_TOKEN_REQUIREMENTS)) {
  violations.push(...checkTemplateTokens(pathStem, requiredTokens))
}
if (violations.length > 0) {
  fail('JSON schema canary check failed', violations)
}
console.log('[prompt-json-canary-guard] OK')

View File

@@ -0,0 +1,108 @@
#!/usr/bin/env node
// Guard: semantic regression check on English prompt templates — no Chinese
// characters, placeholders exactly match the catalog's variableKeys, and
// critical JSON field tokens are still present.
import fs from 'fs'
import path from 'path'
import process from 'process'
const root = process.cwd()
const catalogPath = path.join(root, 'src', 'lib', 'prompt-i18n', 'catalog.ts')
// Any Han-script character (Unicode property escape; requires /u flag).
const chineseCharPattern = /[\p{Script=Han}]/u
const singlePlaceholderPattern = /\{([A-Za-z0-9_]+)\}/g
const doublePlaceholderPattern = /\{\{([A-Za-z0-9_]+)\}\}/g
// Quoted JSON field names that each template must keep mentioning so the
// model's output schema does not silently drift.
const criticalTemplateTokens = new Map([
  ['novel-promotion/voice_analysis', ['"lineIndex"', '"speaker"', '"content"', '"emotionStrength"', '"matchedPanel"']],
  ['novel-promotion/agent_storyboard_plan', ['"panel_number"', '"description"', '"characters"', '"location"', '"scene_type"', '"source_text"', '"shot_type"', '"camera_move"', '"video_prompt"']],
  ['novel-promotion/agent_storyboard_detail', ['"panel_number"', '"description"', '"characters"', '"location"', '"scene_type"', '"source_text"', '"shot_type"', '"camera_move"', '"video_prompt"']],
  ['novel-promotion/agent_storyboard_insert', ['"panel_number"', '"description"', '"characters"', '"location"', '"scene_type"', '"source_text"', '"shot_type"', '"camera_move"', '"video_prompt"']],
  ['novel-promotion/screenplay_conversion', ['"clip_id"', '"scenes"', '"heading"', '"content"', '"dialogue"', '"voiceover"']],
  ['novel-promotion/select_location', ['"locations"', '"name"', '"summary"', '"descriptions"']],
  ['novel-promotion/episode_split', ['"analysis"', '"episodes"', '"startMarker"', '"endMarker"', '"validation"']],
  ['novel-promotion/image_prompt_modify', ['"image_prompt"', '"video_prompt"']],
  ['novel-promotion/character_create', ['"prompt"']],
  ['novel-promotion/location_create', ['"prompt"']],
])
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, details = []) {
  console.error(`\n[prompt-semantic-regression] ${title}`)
  for (const line of details) {
    console.error(`  - ${line}`)
  }
  process.exit(1)
}
// Extract { pathStem, variableKeys } records from the catalog source text.
// Matching is purely textual: each entry is a `pathStem: '...'` followed
// (lazily) by its `variableKeys: [...]` array of quoted strings.
function parseCatalog(text) {
  const entryPattern = /pathStem:\s*'([^']+)'\s*,[\s\S]*?variableKeys:\s*\[([\s\S]*?)\]\s*,/g
  const keyPattern = /'([^']+)'/g
  const parsed = []
  let entryHit
  while ((entryHit = entryPattern.exec(text)) !== null) {
    const variableKeys = []
    const rawKeys = entryHit[2] || ''
    keyPattern.lastIndex = 0
    let keyHit
    while ((keyHit = keyPattern.exec(rawKeys)) !== null) {
      variableKeys.push(keyHit[1])
    }
    parsed.push({ pathStem: entryHit[1], variableKeys })
  }
  return parsed
}
// Collect the unique placeholder names referenced in a template, covering
// both {key} and {{key}} forms. Order is first-seen: single-brace hits first.
function extractPlaceholders(template) {
  const found = new Set()
  for (const pattern of [singlePlaceholderPattern, doublePlaceholderPattern]) {
    for (const hit of template.matchAll(pattern)) {
      if (hit[1]) found.add(hit[1])
    }
  }
  return [...found]
}
// Main: for every catalog entry, check the English template for stray Chinese
// content, exact placeholder/variableKey agreement, and critical tokens.
if (!fs.existsSync(catalogPath)) {
  fail('catalog.ts not found', ['src/lib/prompt-i18n/catalog.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const entries = parseCatalog(catalogText)
if (entries.length === 0) {
  fail('failed to parse prompt catalog entries')
}
const violations = []
for (const entry of entries) {
  const templatePath = path.join(root, 'lib', 'prompts', `${entry.pathStem}.en.txt`)
  if (!fs.existsSync(templatePath)) {
    violations.push(`missing template: lib/prompts/${entry.pathStem}.en.txt`)
    continue
  }
  const template = fs.readFileSync(templatePath, 'utf8')
  // English templates must not contain Han-script characters.
  if (chineseCharPattern.test(template)) {
    violations.push(`unexpected Chinese content in English template: lib/prompts/${entry.pathStem}.en.txt`)
  }
  // Placeholders and declared variableKeys must match exactly in both
  // directions (no missing, no unexpected).
  const placeholders = extractPlaceholders(template)
  const placeholderSet = new Set(placeholders)
  const variableKeySet = new Set(entry.variableKeys)
  for (const key of entry.variableKeys) {
    if (!placeholderSet.has(key)) {
      violations.push(`missing placeholder {${key}} in lib/prompts/${entry.pathStem}.en.txt`)
    }
  }
  for (const key of placeholders) {
    if (!variableKeySet.has(key)) {
      violations.push(`unexpected placeholder {${key}} in lib/prompts/${entry.pathStem}.en.txt`)
    }
  }
  // Critical JSON field tokens (if configured for this stem) must survive.
  const requiredTokens = criticalTemplateTokens.get(entry.pathStem) || []
  for (const token of requiredTokens) {
    if (!template.includes(token)) {
      violations.push(`missing semantic token ${token} in lib/prompts/${entry.pathStem}.en.txt`)
    }
  }
}
if (violations.length > 0) {
  fail('semantic regression check failed', violations)
}
console.log(`[prompt-semantic-regression] OK (${entries.length} templates checked)`)

View File

@@ -0,0 +1,9 @@
{
"allowedDirectTaskStateUsageFiles": [
"src/lib/query/hooks/useTaskTargetStates.ts",
"src/lib/query/hooks/useTaskPresentation.ts",
"src/lib/query/hooks/useProjectAssets.ts",
"src/lib/query/hooks/useGlobalAssets.ts"
],
"allowedLegacyGeneratingUsageFiles": []
}

View File

@@ -0,0 +1,132 @@
#!/usr/bin/env node
// Guard: enforces the canonical task-loading architecture — direct task-state
// hooks only in a baseline allowlist, no cross-domain loading labels, no
// legacy "generating" truths in UI, no non-canonical mutation re-exports.
import fs from 'fs'
import path from 'path'
import process from 'process'
const workspaceRoot = process.cwd()
// Allowlist baseline lives next to this guard and is required to exist.
const baselinePath = path.join(workspaceRoot, 'scripts/guards/task-loading-baseline.json')
// Recursively collect every file path under `dir`, skipping dependency and
// build-output directories.
//
// Fix: guard against a missing `dir` (returns `out` unchanged) instead of
// letting fs.readdirSync throw — this matches the defensive behaviour of the
// `walk` helpers in the sibling guard scripts.
function walkFiles(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const entries = fs.readdirSync(dir, { withFileTypes: true })
  for (const entry of entries) {
    if (entry.name === 'node_modules' || entry.name === '.git' || entry.name === '.next') continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walkFiles(fullPath, out)
    } else {
      out.push(fullPath)
    }
  }
  return out
}
// Workspace-relative path with forward slashes, regardless of platform.
function toPosixRelative(filePath) {
  const rel = path.relative(workspaceRoot, filePath)
  return rel.split(path.sep).join('/')
}
// Scan .ts/.tsx files for lines containing the literal `pattern`; return
// "workspace/relative/path:line" strings for every hit.
function collectMatches(files, pattern) {
  const hits = []
  for (const filePath of files) {
    const isTypeScript = filePath.endsWith('.ts') || filePath.endsWith('.tsx')
    if (!isTypeScript) continue
    const relPath = toPosixRelative(filePath)
    const fileLines = fs.readFileSync(filePath, 'utf8').split('\n')
    fileLines.forEach((lineText, lineIdx) => {
      if (lineText.includes(pattern)) {
        hits.push(`${relPath}:${lineIdx + 1}`)
      }
    })
  }
  return hits
}
// Print a labelled failure report to stderr and abort with exit code 1.
function fail(title, lines) {
  console.error(`\n[task-loading-guard] ${title}`)
  lines.forEach((line) => {
    console.error(`  - ${line}`)
  })
  process.exit(1)
}
// Main: load the baseline allowlists, then run the four checks in order.
if (!fs.existsSync(baselinePath)) {
  fail('Missing baseline file', [toPosixRelative(baselinePath)])
}
const baseline = JSON.parse(fs.readFileSync(baselinePath, 'utf8'))
const allowedFiles = new Set(baseline.allowedDirectTaskStateUsageFiles || [])
const allowedLegacyGeneratingFiles = new Set(baseline.allowedLegacyGeneratingUsageFiles || [])
const allFiles = walkFiles(path.join(workspaceRoot, 'src'))
// Check 1: useTaskTargetStates may only be called from baseline-listed files.
const directTaskStateUsage = collectMatches(allFiles, 'useTaskTargetStates(')
const directUsageOutOfAllowlist = directTaskStateUsage
  .map((entry) => entry.split(':')[0])
  .filter((file) => !allowedFiles.has(file))
if (directUsageOutOfAllowlist.length > 0) {
  fail(
    'Found component-level direct useTaskTargetStates outside baseline allowlist',
    Array.from(new Set(directUsageOutOfAllowlist)),
  )
}
// Check 2: the video panel label must not be reused in other domains.
const crossDomainLabels = collectMatches(allFiles, 'video.panelCard.generating')
if (crossDomainLabels.length > 0) {
  fail('Found cross-domain loading label reuse (video.panelCard.generating)', crossDomainLabels)
}
// Check 3: legacy per-entity "generating" flags are banned in UI layers
// (app/ and components/) outside the baseline allowlist.
const uiFiles = allFiles.filter((file) => {
  const relPath = toPosixRelative(file)
  return relPath.startsWith('src/app/') || relPath.startsWith('src/components/')
})
const legacyGeneratingPatterns = [
  'appearance.generating',
  'panel.generatingImage',
  'shot.generatingImage',
  'line.generating',
]
const legacyGeneratingMatches = legacyGeneratingPatterns.flatMap((pattern) =>
  collectMatches(uiFiles, pattern),
)
const legacyGeneratingOutOfAllowlist = legacyGeneratingMatches
  .map((entry) => entry.split(':')[0])
  .filter((file) => !allowedLegacyGeneratingFiles.has(file))
if (legacyGeneratingOutOfAllowlist.length > 0) {
  fail(
    'Found legacy generating truth usage in UI components',
    Array.from(new Set(legacyGeneratingOutOfAllowlist)),
  )
}
// Check 4: hooks/index.ts must not re-export generation mutations from the
// asset hooks (each mutation has one canonical export location).
const hooksIndexPath = path.join(workspaceRoot, 'src/lib/query/hooks/index.ts')
if (fs.existsSync(hooksIndexPath)) {
  const hooksIndex = fs.readFileSync(hooksIndexPath, 'utf8')
  const bannedReexports = [
    {
      pattern: /export\s*\{[^}]*useGenerateCharacterImage[^}]*\}\s*from\s*['"]\.\/useGlobalAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateCharacterImage from useGlobalAssets',
    },
    {
      pattern: /export\s*\{[^}]*useGenerateLocationImage[^}]*\}\s*from\s*['"]\.\/useGlobalAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateLocationImage from useGlobalAssets',
    },
    {
      pattern: /export\s*\{[^}]*useGenerateProjectCharacterImage[^}]*\}\s*from\s*['"]\.\/useProjectAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateProjectCharacterImage from useProjectAssets',
    },
    {
      pattern: /export\s*\{[^}]*useGenerateProjectLocationImage[^}]*\}\s*from\s*['"]\.\/useProjectAssets['"]/m,
      message: 'hooks/index.ts must not export useGenerateProjectLocationImage from useProjectAssets',
    },
  ]
  const violations = bannedReexports
    .filter((item) => item.pattern.test(hooksIndex))
    .map((item) => item.message)
  if (violations.length > 0) {
    fail('Found non-canonical mutation re-exports', violations)
  }
}
console.log('[task-loading-guard] OK')

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
# task-state-unification guard: fails when forbidden task-state patterns
# appear anywhere in the listed source trees.
set -euo pipefail

failed=0

# check_absent LABEL PATTERN PATH...
# Greps tracked + untracked files with an extended regex; on any hit, prints
# the matches plus a CI error annotation and marks the run as failed.
check_absent() {
  local label="$1"
  local pattern="$2"
  shift 2
  local output
  # '|| true' keeps set -e from aborting when grep finds nothing (exit 1).
  output="$(git grep --untracked -nE "$pattern" -- "$@" || true)"
  if [[ -n "$output" ]]; then
    echo "$output"
    echo "::error title=${label}::${label}"
    failed=1
  fi
}

# UI layers must not branch directly on the 'cancelled' status string.
check_absent \
  "Do not branch UI status on cancelled" \
  "status[[:space:]]*===[[:space:]]*['\\\"]cancelled['\\\"]|status[[:space:]]*==[[:space:]]*['\\\"]cancelled['\\\"]" \
  src/app \
  src/components \
  src/features \
  src/lib/query
# The handoff hook was removed; any reference anywhere in src/ is an error.
check_absent \
  "useTaskHandoff is forbidden" \
  "useTaskHandoff" \
  src
# Legacy polling hooks must not be called from app-layer code.
check_absent \
  "Do not use legacy task hooks in app layer" \
  "useActiveTasks\\(|useTaskStatus\\(" \
  src/app \
  src/features
if [[ "$failed" -ne 0 ]]; then
  exit 1
fi
echo "task-state-unification guard passed"

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
# Task Status Cutover Audit: read-only repo scan that prints PASS/FAIL per
# rule and exits non-zero when any rule has findings.
set -euo pipefail

# Run from the repository root so git grep path filters resolve consistently.
ROOT_DIR="$(git rev-parse --show-toplevel)"
cd "$ROOT_DIR"
FAILED=0

print_header() {
  echo
  echo "============================================================"
  echo "$1"
  echo "============================================================"
}
print_ok() {
  echo "[PASS] $1"
}
print_fail() {
  echo "[FAIL] $1"
}

# run_zero_match_check TITLE PATTERN PATH...
# Passes only when PATTERN (extended regex) has zero matches under PATH...
run_zero_match_check() {
  local title="$1"
  local pattern="$2"
  shift 2
  local paths=("$@")
  local output
  # '|| true' prevents set -e from treating "no matches" (exit 1) as an error.
  output="$(git grep -n -E "$pattern" -- "${paths[@]}" || true)"
  if [[ -z "$output" ]]; then
    print_ok "$title"
  else
    print_fail "$title"
    echo "$output"
    FAILED=1
  fi
}

# useTaskTargetStates may only appear in the two canonical asset hook files.
run_usetasktargetstates_check() {
  local title="useTaskTargetStates 仅允许在 useProjectAssets/useGlobalAssets 中使用"
  local output
  output="$(git grep -n "useTaskTargetStates" -- src || true)"
  if [[ -z "$output" ]]; then
    print_ok "$title (当前 0 命中)"
    return
  fi
  # Drop hits from the two allowed files; anything left over is a violation.
  local filtered
  filtered="$(echo "$output" | grep -v "src/lib/query/hooks/useProjectAssets.ts" | grep -v "src/lib/query/hooks/useGlobalAssets.ts" || true)"
  if [[ -z "$filtered" ]]; then
    print_ok "$title"
  else
    print_fail "$title"
    echo "$filtered"
    FAILED=1
  fi
}

print_header "Task Status Cutover Audit"
run_zero_match_check \
  "禁止 useTaskHandoff" \
  "useTaskHandoff" \
  src
run_zero_match_check \
  "禁止 manualRegeneratingItems/setRegeneratingItems/clearRegeneratingItem" \
  "manualRegeneratingItems|setRegeneratingItems|clearRegeneratingItem" \
  src
run_zero_match_check \
  "禁止业务层直接判断 status ===/!== cancelled" \
  "status\\s*===\\s*['\\\"]cancelled['\\\"]|status\\s*!==\\s*['\\\"]cancelled['\\\"]" \
  src
run_zero_match_check \
  "禁止 generatingImage/generatingVideo/generatingLipSync 字段" \
  "\\bgeneratingImage\\b|\\bgeneratingVideo\\b|\\bgeneratingLipSync\\b" \
  src
run_usetasktargetstates_check
run_zero_match_check \
  "禁止 novel-promotion/asset-hub/shared-assets 中 useState(false) 作为生成态命名" \
  "const \\[[^\\]]*(Generating|Regenerating|WaitingForGeneration|AnalyzingAssets|GeneratingAll|CopyingFromGlobal)[^\\]]*\\]\\s*=\\s*useState\\(false\\)" \
  "src/app/[locale]/workspace/[projectId]/modes/novel-promotion" \
  "src/app/[locale]/workspace/asset-hub" \
  "src/components/shared/assets"

print_header "Audit Result"
if [[ "$FAILED" -eq 0 ]]; then
  echo "All checks passed."
  exit 0
fi
echo "Audit failed. Please fix findings above."
exit 1

View File

@@ -0,0 +1,96 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
import process from 'process'
// Repository root: the guard is expected to run from the repo top level.
const root = process.cwd()

/** Report a guard violation (banner plus bullets) and terminate non-zero. */
function fail(title, details = []) {
  console.error(`\n[task-target-states-no-polling-guard] ${title}`)
  details.forEach((detail) => console.error(` - ${detail}`))
  process.exit(1)
}

/** Read a repo-relative file, failing the guard when it does not exist. */
function readFile(relativePath) {
  const absolute = path.join(root, relativePath)
  if (!fs.existsSync(absolute)) {
    fail('Missing required file', [relativePath])
  }
  return fs.readFileSync(absolute, 'utf8')
}

/** Recursively list all files under dir, skipping VCS/build/dependency dirs. */
function walk(dir, out = []) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (['.git', '.next', 'node_modules'].includes(entry.name)) continue
    const child = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(child, out)
    } else {
      out.push(child)
    }
  }
  return out
}

/** Convert an absolute path into a POSIX-style repo-relative path. */
function toRel(fullPath) {
  const relative = path.relative(root, fullPath)
  return relative.split(path.sep).join('/')
}

/** Return "file:line" hits for every src/ .ts/.tsx line matching pattern. */
function collectPattern(pattern) {
  const hits = []
  for (const fullPath of walk(path.join(root, 'src'))) {
    if (!fullPath.endsWith('.ts') && !fullPath.endsWith('.tsx')) continue
    const lines = fs.readFileSync(fullPath, 'utf8').split('\n')
    lines.forEach((line, index) => {
      if (pattern.test(line)) hits.push(`${toRel(fullPath)}:${index + 1}`)
    })
  }
  return hits
}
// ---- task-target-states-no-polling-guard: main flow ----
// Target-state freshness must come from SSE invalidation, never polling.
// Rule 1: the refetchIntervalMs knob must not exist anywhere in src/.
const refetchIntervalMsHits = collectPattern(/\brefetchIntervalMs\b/)
if (refetchIntervalMsHits.length > 0) {
  fail('Found forbidden refetchIntervalMs usage', refetchIntervalMsHits)
}
// Rule 2: VoiceStage must not fall back to timer-based polling.
const voiceStagePath =
  'src/app/[locale]/workspace/[projectId]/modes/novel-promotion/components/VoiceStage.tsx'
const voiceStageText = readFile(voiceStagePath)
if (voiceStageText.includes('setInterval(')) {
  fail('VoiceStage must not use timer polling', [voiceStagePath])
}
// Rule 3: the target-state map query must keep refetchInterval disabled.
const targetStateMapPath = 'src/lib/query/hooks/useTaskTargetStateMap.ts'
const targetStateMapText = readFile(targetStateMapPath)
if (!/refetchInterval:\s*false/.test(targetStateMapText)) {
  fail('useTaskTargetStateMap must keep refetchInterval disabled', [targetStateMapPath])
}
// Rule 4: SSE may invalidate target states only on terminal task events.
// Capture the expression assigned to shouldInvalidateTargetStates (up to the
// first blank line) and inspect which event types it references.
const ssePath = 'src/lib/query/hooks/useSSE.ts'
const sseText = readFile(ssePath)
const targetStatesInvalidateExprMatch = sseText.match(
  /const shouldInvalidateTargetStates\s*=\s*([\s\S]*?)\n\s*\n/,
)
if (!targetStatesInvalidateExprMatch) {
  fail('Unable to locate shouldInvalidateTargetStates expression', [ssePath])
}
// fail() calls process.exit, so the match is non-null past this point.
const targetStatesInvalidateExpr = targetStatesInvalidateExprMatch[1]
if (!/TASK_EVENT_TYPE\.COMPLETED/.test(targetStatesInvalidateExpr) || !/TASK_EVENT_TYPE\.FAILED/.test(targetStatesInvalidateExpr)) {
  fail('useSSE must invalidate target states only for terminal events', [ssePath])
}
if (/TASK_EVENT_TYPE\.CREATED/.test(targetStatesInvalidateExpr)) {
  fail('useSSE target-state invalidation must not include CREATED', [ssePath])
}
if (/TASK_EVENT_TYPE\.PROCESSING/.test(targetStatesInvalidateExpr)) {
  fail('useSSE target-state invalidation must not include PROCESSING', [ssePath])
}
console.log('[task-target-states-no-polling-guard] OK')

View File

@@ -0,0 +1,82 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Repo root plus the behavior-test directories this guard audits.
const root = process.cwd()
const targetDirs = [
  path.join(root, 'tests', 'integration', 'api', 'contract'),
  path.join(root, 'tests', 'integration', 'chain'),
]

/** Print a violation summary (title plus bullets) and exit non-zero. */
function fail(title, details = []) {
  console.error(`\n[test-behavior-quality-guard] ${title}`)
  details.forEach((detail) => console.error(` - ${detail}`))
  process.exit(1)
}

/** Recursively collect *.test.ts files under dir; missing dirs yield none. */
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (entry.name === '.git' || entry.name === 'node_modules') continue
    const full = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(full, out)
    } else if (entry.isFile() && entry.name.endsWith('.test.ts')) {
      out.push(full)
    }
  }
  return out
}

/** Absolute path -> POSIX-style repo-relative path. */
function toRel(fullPath) {
  const relative = path.relative(root, fullPath)
  return relative.split(path.sep).join('/')
}
// ---- test-behavior-quality-guard: main flow ----
// Scans contract/chain behavior tests for low-value testing patterns.
const files = targetDirs.flatMap((dir) => walk(dir))
if (files.length === 0) {
  fail('No target test files found', targetDirs.map((dir) => toRel(dir)))
}
const violations = []
for (const file of files) {
  const rel = toRel(file)
  const text = fs.readFileSync(file, 'utf8')
  // Behavior tests must not read application source text (white-box coupling).
  const hasSourceRead = /(readFileSync|fs\.readFileSync)\s*\([\s\S]{0,240}src\/(app|lib)\//m.test(text)
  if (hasSourceRead) {
    violations.push(`${rel}: reading source code text is forbidden in behavior contract/chain tests`)
  }
  // Structural string assertions pin implementation details, not behavior.
  const forbiddenStringContracts = [
    /toContain\(\s*['"]apiHandler['"]\s*\)/,
    /toContain\(\s*['"]submitTask['"]\s*\)/,
    /toContain\(\s*['"]maybeSubmitLLMTask['"]\s*\)/,
    /includes\(\s*['"]apiHandler['"]\s*\)/,
    /includes\(\s*['"]submitTask['"]\s*\)/,
    /includes\(\s*['"]maybeSubmitLLMTask['"]\s*\)/,
  ]
  for (const pattern of forbiddenStringContracts) {
    if (pattern.test(text)) {
      violations.push(`${rel}: forbidden structural string assertion matched ${pattern}`)
      break // one report per file is enough for this rule
    }
  }
  // toHaveBeenCalled() alone proves nothing about arguments; require at least
  // one toHaveBeenCalledWith() next to it.
  const hasWeakCallAssertion = /toHaveBeenCalled\(\s*\)/.test(text)
  const hasStrongCallAssertion = /toHaveBeenCalledWith\(/.test(text)
  if (hasWeakCallAssertion && !hasStrongCallAssertion) {
    violations.push(`${rel}: has toHaveBeenCalled() without any toHaveBeenCalledWith() result assertions`)
  }
}
if (violations.length > 0) {
  fail('Behavior quality violations found', violations)
}
console.log(`[test-behavior-quality-guard] OK files=${files.length}`)

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Repo root and the two contract files this guard compares.
const root = process.cwd()
const catalogPath = path.join(root, 'tests', 'contracts', 'route-catalog.ts')
const matrixPath = path.join(root, 'tests', 'contracts', 'route-behavior-matrix.ts')

/** Print the failure title plus detail bullets, then exit non-zero. */
function fail(title, details = []) {
  console.error(`\n[test-behavior-route-coverage-guard] ${title}`)
  details.forEach((detail) => console.error(` - ${detail}`))
  process.exit(1)
}
if (!fs.existsSync(catalogPath)) {
fail('route catalog is missing', ['tests/contracts/route-catalog.ts'])
}
if (!fs.existsSync(matrixPath)) {
fail('route behavior matrix is missing', ['tests/contracts/route-behavior-matrix.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const matrixText = fs.readFileSync(matrixPath, 'utf8')
if (!matrixText.includes('ROUTE_CATALOG.map')) {
fail('route behavior matrix must derive entries from ROUTE_CATALOG.map')
}
const routeFilesBlockMatch = catalogText.match(/const ROUTE_FILES = \[([\s\S]*?)\] as const/)
if (!routeFilesBlockMatch) {
fail('unable to parse ROUTE_FILES block from route catalog')
}
const routeFilesBlock = routeFilesBlockMatch ? routeFilesBlockMatch[1] : ''
const routeCount = Array.from(routeFilesBlock.matchAll(/'src\/app\/api\/[^']+\/route\.ts'/g)).length
if (routeCount === 0) {
fail('no routes detected in route catalog')
}
const testFiles = Array.from(matrixText.matchAll(/'tests\/[a-zA-Z0-9_\-/.]+\.test\.ts'/g))
.map((match) => match[0].slice(1, -1))
if (testFiles.length === 0) {
fail('route behavior matrix does not declare any behavior test files')
}
const missingTests = Array.from(new Set(testFiles)).filter((file) => !fs.existsSync(path.join(root, file)))
if (missingTests.length > 0) {
fail('route behavior matrix references missing test files', missingTests)
}
console.log(`[test-behavior-route-coverage-guard] OK routes=${routeCount} tests=${new Set(testFiles).size}`)

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Repo root plus the catalog/matrix contract files under comparison.
const root = process.cwd()
const catalogPath = path.join(root, 'tests', 'contracts', 'task-type-catalog.ts')
const matrixPath = path.join(root, 'tests', 'contracts', 'tasktype-behavior-matrix.ts')

/** Report guard failure details and abort the process with exit code 1. */
function fail(title, details = []) {
  const bullets = details.map((detail) => ` - ${detail}`)
  console.error([`\n[test-behavior-tasktype-coverage-guard] ${title}`, ...bullets].join('\n'))
  process.exit(1)
}
if (!fs.existsSync(catalogPath)) {
fail('task type catalog is missing', ['tests/contracts/task-type-catalog.ts'])
}
if (!fs.existsSync(matrixPath)) {
fail('tasktype behavior matrix is missing', ['tests/contracts/tasktype-behavior-matrix.ts'])
}
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const matrixText = fs.readFileSync(matrixPath, 'utf8')
if (!matrixText.includes('TASK_TYPE_CATALOG.map')) {
fail('tasktype behavior matrix must derive entries from TASK_TYPE_CATALOG.map')
}
const taskTypeCount = Array.from(catalogText.matchAll(/\[TASK_TYPE\.([A-Z_]+)\]/g)).length
if (taskTypeCount === 0) {
fail('no task types detected in task type catalog')
}
const testFiles = Array.from(matrixText.matchAll(/'tests\/[a-zA-Z0-9_\-/.]+\.test\.ts'/g))
.map((match) => match[0].slice(1, -1))
if (testFiles.length === 0) {
fail('tasktype behavior matrix does not declare any behavior test files')
}
const missingTests = Array.from(new Set(testFiles)).filter((file) => !fs.existsSync(path.join(root, file)))
if (missingTests.length > 0) {
fail('tasktype behavior matrix references missing test files', missingTests)
}
console.log(`[test-behavior-tasktype-coverage-guard] OK taskTypes=${taskTypeCount} tests=${new Set(testFiles).size}`)

View File

@@ -0,0 +1,57 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Repo root, the API route tree on disk, and the catalog contract file.
const root = process.cwd()
const apiDir = path.join(root, 'src', 'app', 'api')
const catalogPath = path.join(root, 'tests', 'contracts', 'route-catalog.ts')

/** Emit a guard failure report and terminate with exit code 1. */
function fail(title, details = []) {
  console.error(`\n[test-route-coverage-guard] ${title}`)
  details.forEach((detail) => console.error(` - ${detail}`))
  process.exit(1)
}

/** Recursively gather route.ts files, skipping VCS/build/dependency dirs. */
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (['.git', '.next', 'node_modules'].includes(entry.name)) continue
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(fullPath, out)
    } else if (entry.name === 'route.ts') {
      out.push(fullPath)
    }
  }
  return out
}

/** Absolute path -> POSIX-style path relative to the repo root. */
function toRel(fullPath) {
  const relative = path.relative(root, fullPath)
  return relative.split(path.sep).join('/')
}
// ---- test-route-coverage-guard: main flow ----
// Compares route files that exist on disk with the entries declared in
// tests/contracts/route-catalog.ts and fails on drift in either direction.
if (!fs.existsSync(catalogPath)) {
  fail('route-catalog.ts is missing', ['tests/contracts/route-catalog.ts'])
}
const actualRoutes = walk(apiDir).map(toRel).sort()
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const catalogRoutes = Array.from(catalogText.matchAll(/'src\/app\/api\/[^']+\/route\.ts'/g))
  .map((match) => match[0].slice(1, -1)) // strip surrounding quotes
  .sort()
// Set-based membership keeps the two diffs O(n) instead of the O(n^2)
// Array.includes scan inside each filter.
const actualRouteSet = new Set(actualRoutes)
const catalogRouteSet = new Set(catalogRoutes)
const missingInCatalog = actualRoutes.filter((routeFile) => !catalogRouteSet.has(routeFile))
const staleInCatalog = catalogRoutes.filter((routeFile) => !actualRouteSet.has(routeFile))
if (missingInCatalog.length > 0) {
  fail('Missing routes in tests/contracts/route-catalog.ts', missingInCatalog)
}
if (staleInCatalog.length > 0) {
  fail('Stale route entries found in tests/contracts/route-catalog.ts', staleInCatalog)
}
console.log(`[test-route-coverage-guard] OK routes=${actualRoutes.length}`)

View File

@@ -0,0 +1,46 @@
#!/usr/bin/env node
import fs from 'fs'
import path from 'path'
// Repo root, the TASK_TYPE source of truth, and the catalog under audit.
const root = process.cwd()
const taskTypesPath = path.join(root, 'src', 'lib', 'task', 'types.ts')
const catalogPath = path.join(root, 'tests', 'contracts', 'task-type-catalog.ts')

/** Emit the failure banner plus one bullet per detail, then exit(1). */
function fail(title, details = []) {
  const bullets = details.map((detail) => ` - ${detail}`)
  console.error([`\n[test-tasktype-coverage-guard] ${title}`, ...bullets].join('\n'))
  process.exit(1)
}
// ---- test-tasktype-coverage-guard: main flow ----
// Keeps tests/contracts/task-type-catalog.ts in lockstep with TASK_TYPE.
if (!fs.existsSync(taskTypesPath)) {
  fail('Task type source file is missing', ['src/lib/task/types.ts'])
}
if (!fs.existsSync(catalogPath)) {
  fail('Task type catalog file is missing', ['tests/contracts/task-type-catalog.ts'])
}
const taskTypesText = fs.readFileSync(taskTypesPath, 'utf8')
const catalogText = fs.readFileSync(catalogPath, 'utf8')
const taskTypeBlockMatch = taskTypesText.match(/export const TASK_TYPE = \{([\s\S]*?)\n\} as const/)
if (!taskTypeBlockMatch) {
  fail('Unable to parse TASK_TYPE block from src/lib/task/types.ts')
}
// fail() exits above; the ternary only satisfies static null analysis.
const taskTypeBlock = taskTypeBlockMatch ? taskTypeBlockMatch[1] : ''
const taskTypeKeys = Array.from(taskTypeBlock.matchAll(/^\s+([A-Z_]+):\s'[^']+',?$/gm)).map((match) => match[1])
const catalogKeys = Array.from(catalogText.matchAll(/\[TASK_TYPE\.([A-Z_]+)\]/g)).map((match) => match[1])
// Set membership keeps both diffs O(n) rather than O(n^2) Array.includes scans.
const taskTypeKeySet = new Set(taskTypeKeys)
const catalogKeySet = new Set(catalogKeys)
const missingKeys = taskTypeKeys.filter((key) => !catalogKeySet.has(key))
const staleKeys = catalogKeys.filter((key) => !taskTypeKeySet.has(key))
if (missingKeys.length > 0) {
  fail('Missing TASK_TYPE owners in tests/contracts/task-type-catalog.ts', missingKeys)
}
if (staleKeys.length > 0) {
  fail('Stale TASK_TYPE keys in tests/contracts/task-type-catalog.ts', staleKeys)
}
console.log(`[test-tasktype-coverage-guard] OK taskTypes=${taskTypeKeys.length}`)

View File

@@ -0,0 +1,127 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { createHash } from 'node:crypto'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import { prisma } from '@/lib/prisma'
import { MEDIA_MODEL_MAPPINGS } from './media-mapping'
// Root folder where per-run JSON snapshots of legacy media refs are written.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
// Page size for cursor-paginated reads of each mapped model.
const BATCH_SIZE = 500
// Minimal structural view of a Prisma model delegate accessed dynamically.
type DynamicModel = {
  findMany: (args: unknown) => Promise<Array<Record<string, unknown>>>
  createMany?: (args: unknown) => Promise<unknown>
}
// Dynamic access lets mapping entries name models as plain strings.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
/** Filesystem-safe UTC timestamp: ISO-8601 with ':' and '.' replaced by '-'. */
function nowStamp() {
  const iso = new Date().toISOString()
  return iso.replace(/[:.]/g, '-')
}
/** Hex-encoded SHA-256 digest of the given string. */
function checksum(value: string) {
  const hash = createHash('sha256')
  hash.update(value)
  return hash.digest('hex')
}
/** Build a Prisma select object covering id plus each listed field. */
function toSelect(fields: string[]) {
  const select: Record<string, true> = { id: true }
  fields.forEach((field) => {
    select[field] = true
  })
  return select
}
/**
 * Snapshot every non-empty legacy media reference (string URL fields) into a
 * timestamped backup: best-effort into the legacyMediaRefBackup table and
 * always into a JSON file under data/migration-backups/<runId>/.
 */
async function main() {
  const runId = nowStamp()
  const backupDir = path.join(BACKUP_ROOT, runId)
  await fs.mkdir(backupDir, { recursive: true })
  const allRows: Array<{
    runId: string
    tableName: string
    rowId: string
    fieldName: string
    legacyValue: string
    checksum: string
  }> = []
  for (const mapping of MEDIA_MODEL_MAPPINGS) {
    const model = prismaDynamic[mapping.model]
    // A mapping may name a model the current schema does not expose.
    if (!model) continue
    const select = toSelect(mapping.fields.map((f) => f.legacyField))
    // Cursor-paginate by id so arbitrarily large tables stream in pages.
    let cursor: string | null = null
    while (true) {
      const page = await model.findMany({
        select,
        ...(cursor
          ? {
              cursor: { id: cursor },
              skip: 1, // skip the cursor row itself
            }
          : {}),
        orderBy: { id: 'asc' },
        take: BATCH_SIZE,
      })
      if (!page.length) break
      for (const row of page) {
        for (const field of mapping.fields) {
          const value = row[field.legacyField]
          // Only snapshot non-empty string values.
          if (typeof value !== 'string' || !value.trim()) continue
          allRows.push({
            runId,
            tableName: mapping.tableName,
            rowId: String(row.id),
            fieldName: field.legacyField,
            legacyValue: value,
            checksum: checksum(value),
          })
        }
      }
      cursor = String(page[page.length - 1].id)
    }
  }
  if (allRows.length > 0) {
    try {
      const backupModel = prismaDynamic.legacyMediaRefBackup
      if (!backupModel?.createMany) {
        throw new Error('Prisma model not found: legacyMediaRefBackup')
      }
      // Insert in chunks of 1000 to keep each createMany statement bounded.
      for (let i = 0; i < allRows.length; i += 1000) {
        const chunk = allRows.slice(i, i + 1000)
        await backupModel.createMany({ data: chunk })
      }
    } catch (error) {
      // DB backup is best-effort; the JSON snapshot below is the fallback.
      const message = error instanceof Error ? error.message : String(error)
      _ulogError('[media-archive-legacy-refs] db backup table unavailable, fallback to file snapshot only', message)
    }
  }
  const snapshotPath = path.join(backupDir, 'legacy-media-refs.json')
  await fs.writeFile(snapshotPath, JSON.stringify(allRows, null, 2), 'utf8')
  // Hash the bytes re-read from disk so the recorded digest matches the file.
  const snapshotHash = checksum(await fs.readFile(snapshotPath, 'utf8'))
  const summary = {
    runId,
    createdAt: new Date().toISOString(),
    backupDir,
    archivedCount: allRows.length,
    snapshotFile: path.basename(snapshotPath),
    snapshotSha256: snapshotHash,
  }
  await fs.writeFile(path.join(backupDir, 'legacy-media-refs-summary.json'), JSON.stringify(summary, null, 2), 'utf8')
  _ulogInfo(`[media-archive-legacy-refs] runId=${runId}`)
  _ulogInfo(`[media-archive-legacy-refs] archived=${allRows.length}`)
  _ulogInfo(`[media-archive-legacy-refs] snapshot=${snapshotPath}`)
}
// Entrypoint: surface failures via the exit code and always release the
// Prisma connection, even on error.
main()
  .catch((error) => {
    _ulogError('[media-archive-legacy-refs] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,122 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { resolveMediaRefFromLegacyValue } from '@/lib/media/service'
import { MEDIA_MODEL_MAPPINGS } from './media-mapping'
// Page size for cursor-paginated reads of each mapped model.
const BATCH_SIZE = 200
// Structural view of a Prisma delegate accessed dynamically by model name.
type DynamicModel = {
  findMany: (args: unknown) => Promise<Array<Record<string, unknown>>>
  update: (args: unknown) => Promise<unknown>
}
// Dynamic access lets mapping entries name models as plain strings.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
/** Prisma select map covering id and every requested column. */
function toSelect(fields: string[]) {
  return fields.reduce<Record<string, true>>(
    (select, field) => {
      select[field] = true
      return select
    },
    { id: true },
  )
}
/**
 * For one model mapping: scan rows in id order and, for each legacy string
 * field that has a value but no media id yet, resolve it to a MediaObject and
 * write the id back. Returns scan/update counters; when the migration columns
 * are not present yet the model is skipped instead of failing the run.
 */
async function backfillModel(mapping: (typeof MEDIA_MODEL_MAPPINGS)[number]) {
  const model = prismaDynamic[mapping.model]
  if (!model) {
    throw new Error(`Prisma model not found: ${mapping.model}`)
  }
  // Select both the legacy value and the target media-id column per field.
  const selectFields = mapping.fields.flatMap((f) => [f.legacyField, f.mediaIdField])
  const select = toSelect(selectFields)
  let cursor: string | null = null
  let scanned = 0
  let updated = 0
  try {
    while (true) {
      const rows = await model.findMany({
        select,
        ...(cursor
          ? {
              cursor: { id: cursor },
              skip: 1, // skip the cursor row itself
            }
          : {}),
        orderBy: { id: 'asc' },
        take: BATCH_SIZE,
      })
      if (!rows.length) break
      for (const row of rows) {
        scanned += 1
        const patch: Record<string, string> = {}
        for (const field of mapping.fields) {
          const mediaId = row[field.mediaIdField]
          const legacyValue = row[field.legacyField]
          // Skip fields already backfilled or without a usable legacy value.
          if (mediaId || typeof legacyValue !== 'string' || !legacyValue.trim()) {
            continue
          }
          const media = await resolveMediaRefFromLegacyValue(legacyValue)
          if (!media) continue // unresolvable refs are left untouched
          patch[field.mediaIdField] = media.id
        }
        if (Object.keys(patch).length > 0) {
          await model.update({
            where: { id: String(row.id) },
            data: patch,
          })
          updated += 1
        }
      }
      cursor = String(rows[rows.length - 1].id)
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    // Heuristic: treat missing table/column errors as "migration not applied
    // yet" and skip this model; anything else is a real failure.
    if (message.includes('does not exist') || message.includes('Unknown column')) {
      _ulogError(
        `[media-backfill-refs] skip ${mapping.tableName}: migration columns not available yet`,
        message,
      )
      return { scanned: 0, updated: 0, skipped: true }
    }
    throw error
  }
  return { scanned, updated, skipped: false }
}
/** Run the backfill across all mapped models and log per-table totals. */
async function main() {
  const startedAt = new Date()
  _ulogInfo(`[media-backfill-refs] started at ${startedAt.toISOString()}`)
  let totalScanned = 0
  let totalUpdated = 0
  // Models are processed sequentially to keep DB load predictable.
  for (const mapping of MEDIA_MODEL_MAPPINGS) {
    const result = await backfillModel(mapping)
    totalScanned += result.scanned
    totalUpdated += result.updated
    if (result.skipped) {
      _ulogInfo(`[media-backfill-refs] ${mapping.tableName}: skipped (run add-only DB migration first)`)
    } else {
      _ulogInfo(
        `[media-backfill-refs] ${mapping.tableName}: scanned=${result.scanned} updatedRows=${result.updated}`,
      )
    }
  }
  _ulogInfo(
    `[media-backfill-refs] done scanned=${totalScanned} updatedRows=${totalUpdated} durationMs=${Date.now() - startedAt.getTime()}`,
  )
}
// Entrypoint: surface failures via the exit code and always release the
// Prisma connection, even on error.
main()
  .catch((error) => {
    _ulogError('[media-backfill-refs] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,202 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import COS from 'cos-nodejs-sdk-v5'
import { prisma } from '@/lib/prisma'
import { resolveStorageKeyFromMediaValue } from '@/lib/media/service'
import { MEDIA_MODEL_MAPPINGS } from './media-mapping'
// Normalized description of one stored object, local or COS.
type StorageEntry = {
  key: string
  sizeBytes: number
  lastModified: string | null
}
// Shape of one page returned by cos.getBucket (SDK typing is loose here).
type CosBucketPage = {
  Contents?: Array<{ Key: string; Size?: string | number; LastModified?: string }>
  IsTruncated?: string | boolean
  NextMarker?: string
}
// Structural view of a Prisma delegate accessed dynamically by model name.
type DynamicModel = {
  findMany: (args: unknown) => Promise<Array<Record<string, unknown>>>
}
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
// Output root for the unreferenced-object index files.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
/** Timestamp token safe for directory names (ISO string, ':' and '.' -> '-'). */
function nowStamp() {
  return new Date().toISOString().replace(/\./g, '-').replace(/:/g, '-')
}
/**
 * Enumerate every file under the local upload directory as storage entries
 * keyed by POSIX-relative path. Returns [] when the directory is absent.
 */
async function listLocalObjects(): Promise<StorageEntry[]> {
  const uploadDir = process.env.UPLOAD_DIR || './data/uploads'
  const rootDir = path.isAbsolute(uploadDir) ? uploadDir : path.join(process.cwd(), uploadDir)
  const exists = await fs.stat(rootDir).then(() => true).catch(() => false)
  if (!exists) return []
  const rows: StorageEntry[] = []
  // Breadth-first traversal over directory paths relative to rootDir.
  const pending: string[] = ['']
  while (pending.length > 0) {
    const rel = pending.shift() as string
    const entries = await fs.readdir(path.join(rootDir, rel), { withFileTypes: true })
    for (const entry of entries) {
      const childRel = path.join(rel, entry.name)
      if (entry.isDirectory()) {
        pending.push(childRel)
      } else if (entry.isFile()) {
        const stat = await fs.stat(path.join(rootDir, childRel))
        rows.push({
          key: childRel.split(path.sep).join('/'),
          sizeBytes: stat.size,
          lastModified: stat.mtime.toISOString(),
        })
      }
    }
  }
  return rows
}
/**
 * List every object in the configured COS bucket via marker pagination.
 * Requires COS_SECRET_ID/COS_SECRET_KEY/COS_BUCKET/COS_REGION env vars.
 */
async function listCosObjects(): Promise<StorageEntry[]> {
  const secretId = process.env.COS_SECRET_ID
  const secretKey = process.env.COS_SECRET_KEY
  const bucket = process.env.COS_BUCKET
  const region = process.env.COS_REGION
  if (!secretId || !secretKey || !bucket || !region) {
    throw new Error('Missing COS env: COS_SECRET_ID/COS_SECRET_KEY/COS_BUCKET/COS_REGION')
  }
  const cos = new COS({ SecretId: secretId, SecretKey: secretKey, Timeout: 60_000 })
  const rows: StorageEntry[] = []
  let marker = ''
  while (true) {
    // Wrap each callback-style getBucket page in a promise.
    const page = await new Promise<CosBucketPage>((resolve, reject) => {
      cos.getBucket(
        {
          Bucket: bucket,
          Region: region,
          Marker: marker,
          MaxKeys: 1000,
        },
        (err, data) => (err ? reject(err) : resolve(data as unknown as CosBucketPage)),
      )
    })
    const contents = page.Contents || []
    for (const item of contents) {
      rows.push({
        key: item.Key,
        sizeBytes: Number(item.Size || 0),
        lastModified: item.LastModified || null,
      })
    }
    // The SDK may report IsTruncated as a boolean or as the string 'true'.
    const truncated = String(page.IsTruncated || 'false') === 'true'
    if (!truncated) break
    // Prefer NextMarker; fall back to the last key of the current page.
    const nextMarker = typeof page.NextMarker === 'string' ? page.NextMarker : ''
    marker = nextMarker || (contents.length ? contents[contents.length - 1].Key : '')
    if (!marker) break // defensive: avoid looping forever on an empty page
  }
  return rows
}
/** Dispatch to the configured storage backend; STORAGE_TYPE defaults to 'cos'. */
async function listStorageObjects() {
  const storageType = process.env.STORAGE_TYPE || 'cos'
  const rows = storageType === 'local' ? await listLocalObjects() : await listCosObjects()
  return { storageType, rows }
}
/**
 * Collect every storage key referenced by the database: primarily from
 * media_objects.storageKey, plus keys resolved from the legacy string fields
 * (which also serves as the only source when media_objects is unavailable).
 */
async function buildReferencedKeySet() {
  const refs = new Set<string>()
  try {
    const mediaRows = await prismaDynamic.mediaObject.findMany({
      select: { storageKey: true },
    })
    for (const row of mediaRows) {
      if (typeof row.storageKey === 'string' && row.storageKey.trim()) refs.add(row.storageKey)
    }
  } catch (error) {
    // The table may not exist yet; the legacy-field scan below still runs.
    const message = error instanceof Error ? error.message : String(error)
    _ulogError('[media-build-unreferenced-index] media_objects unavailable, fallback to legacy field scan', message)
  }
  for (const mapping of MEDIA_MODEL_MAPPINGS) {
    const model = prismaDynamic[mapping.model]
    if (!model) continue
    const select: Record<string, true> = { id: true }
    for (const field of mapping.fields) select[field.legacyField] = true
    // Cursor-paginate by id so large tables stream in bounded pages.
    let cursor: string | null = null
    while (true) {
      const rows = await model.findMany({
        select,
        ...(cursor
          ? {
              cursor: { id: cursor },
              skip: 1, // skip the cursor row itself
            }
          : {}),
        orderBy: { id: 'asc' },
        take: 500,
      })
      if (!rows.length) break
      for (const row of rows) {
        for (const field of mapping.fields) {
          const value = row[field.legacyField]
          if (typeof value !== 'string' || !value.trim()) continue
          const key = await resolveStorageKeyFromMediaValue(value)
          if (key) refs.add(key)
        }
      }
      cursor = String(rows[rows.length - 1].id)
    }
  }
  return refs
}
/** Build and persist the index of storage objects no DB row references. */
async function main() {
  const stamp = nowStamp()
  const backupDir = path.join(BACKUP_ROOT, stamp)
  await fs.mkdir(backupDir, { recursive: true })
  const referenced = await buildReferencedKeySet()
  const storage = await listStorageObjects()
  // An object is unreferenced when its key appears nowhere in the DB set.
  const unreferenced = storage.rows.filter((row) => !referenced.has(row.key))
  const output = {
    createdAt: new Date().toISOString(),
    storageType: storage.storageType,
    totalStorageObjects: storage.rows.length,
    referencedKeyCount: referenced.size,
    unreferencedCount: unreferenced.length,
    objects: unreferenced,
  }
  const filePath = path.join(backupDir, 'unreferenced-storage-objects-index.json')
  await fs.writeFile(filePath, JSON.stringify(output, null, 2), 'utf8')
  _ulogInfo(`[media-build-unreferenced-index] storageType=${storage.storageType}`)
  _ulogInfo(`[media-build-unreferenced-index] total=${storage.rows.length} unreferenced=${unreferenced.length}`)
  _ulogInfo(`[media-build-unreferenced-index] output=${filePath}`)
}
// Entrypoint: surface failures via the exit code and always release the
// Prisma connection, even on error.
main()
  .catch((error) => {
    _ulogError('[media-build-unreferenced-index] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    await prisma.$disconnect()
  })

90
scripts/media-mapping.ts Normal file
View File

@@ -0,0 +1,90 @@
// Pairing of one legacy string column with its new MediaObject foreign key.
export type MediaFieldMapping = {
  legacyField: string
  mediaIdField: string
}
// One Prisma model whose legacy media columns are being migrated.
export type MediaModelMapping = {
  model: string // Prisma delegate name (camelCase)
  tableName: string // physical table name (snake_case)
  fields: MediaFieldMapping[]
}
// Single source of truth for the legacy-media migration scripts: every
// model/field pair that may hold a legacy media value, with the media-id
// column each value should be backfilled into.
export const MEDIA_MODEL_MAPPINGS: MediaModelMapping[] = [
  {
    model: 'characterAppearance',
    tableName: 'character_appearances',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'locationImage',
    tableName: 'location_images',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'novelPromotionCharacter',
    tableName: 'novel_promotion_characters',
    fields: [{ legacyField: 'customVoiceUrl', mediaIdField: 'customVoiceMediaId' }],
  },
  {
    model: 'novelPromotionEpisode',
    tableName: 'novel_promotion_episodes',
    fields: [{ legacyField: 'audioUrl', mediaIdField: 'audioMediaId' }],
  },
  {
    // Panels carry the widest set of media columns (image/video/lip-sync).
    model: 'novelPromotionPanel',
    tableName: 'novel_promotion_panels',
    fields: [
      { legacyField: 'imageUrl', mediaIdField: 'imageMediaId' },
      { legacyField: 'videoUrl', mediaIdField: 'videoMediaId' },
      { legacyField: 'lipSyncVideoUrl', mediaIdField: 'lipSyncVideoMediaId' },
      { legacyField: 'sketchImageUrl', mediaIdField: 'sketchImageMediaId' },
      { legacyField: 'previousImageUrl', mediaIdField: 'previousImageMediaId' },
    ],
  },
  {
    model: 'novelPromotionShot',
    tableName: 'novel_promotion_shots',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'supplementaryPanel',
    tableName: 'supplementary_panels',
    fields: [{ legacyField: 'imageUrl', mediaIdField: 'imageMediaId' }],
  },
  {
    model: 'novelPromotionVoiceLine',
    tableName: 'novel_promotion_voice_lines',
    fields: [{ legacyField: 'audioUrl', mediaIdField: 'audioMediaId' }],
  },
  {
    model: 'voicePreset',
    tableName: 'voice_presets',
    fields: [{ legacyField: 'audioUrl', mediaIdField: 'audioMediaId' }],
  },
  {
    model: 'globalCharacter',
    tableName: 'global_characters',
    fields: [{ legacyField: 'customVoiceUrl', mediaIdField: 'customVoiceMediaId' }],
  },
  {
    model: 'globalCharacterAppearance',
    tableName: 'global_character_appearances',
    fields: [
      { legacyField: 'imageUrl', mediaIdField: 'imageMediaId' },
      { legacyField: 'previousImageUrl', mediaIdField: 'previousImageMediaId' },
    ],
  },
  {
    model: 'globalLocationImage',
    tableName: 'global_location_images',
    fields: [
      { legacyField: 'imageUrl', mediaIdField: 'imageMediaId' },
      { legacyField: 'previousImageUrl', mediaIdField: 'previousImageMediaId' },
    ],
  },
  {
    model: 'globalVoice',
    tableName: 'global_voices',
    fields: [{ legacyField: 'customVoiceUrl', mediaIdField: 'customVoiceMediaId' }],
  },
]

View File

@@ -0,0 +1,111 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import { prisma } from '@/lib/prisma'
// Location of the timestamped migration backup folders.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')

// Table name -> row count.
type CountMap = Record<string, number>

/**
 * Locate the newest backup directory (lexicographic timestamp ordering) that
 * contains a metadata.json manifest; throws when none exists.
 */
async function findLatestBackupDir() {
  const rootExists = await fs.stat(BACKUP_ROOT).then(() => true).catch(() => false)
  if (!rootExists) {
    throw new Error(`Backup root not found: ${BACKUP_ROOT}`)
  }
  const dirents = await fs.readdir(BACKUP_ROOT, { withFileTypes: true })
  const names = dirents
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name)
    .sort()
  const validDirs: string[] = []
  for (const name of names) {
    const metadataPath = path.join(BACKUP_ROOT, name, 'metadata.json')
    const hasMetadata = await fs.stat(metadataPath).then(() => true).catch(() => false)
    if (hasMetadata) validDirs.push(name)
  }
  if (!validDirs.length) {
    throw new Error(`No backup directories found in ${BACKUP_ROOT}`)
  }
  return path.join(BACKUP_ROOT, validDirs[validDirs.length - 1])
}
async function readExpectedCounts(backupDir: string): Promise<CountMap> {
  // metadata.json is produced by the safety-backup script; its tableCounts
  // field maps table name -> row count at backup time.
  const metadataPath = path.join(backupDir, 'metadata.json')
  const metadata = JSON.parse(await fs.readFile(metadataPath, 'utf8'))
  return (metadata.tableCounts || {}) as CountMap
}
async function currentCounts(): Promise<CountMap> {
  // Each pair is [reported name, physical table name]; identical today but
  // kept as pairs so a future rename stays a one-line change.
  const tables: Array<[string, string]> = [
    ['projects', 'projects'],
    ['novel_promotion_projects', 'novel_promotion_projects'],
    ['novel_promotion_episodes', 'novel_promotion_episodes'],
    ['novel_promotion_panels', 'novel_promotion_panels'],
    ['novel_promotion_voice_lines', 'novel_promotion_voice_lines'],
    ['global_characters', 'global_characters'],
    ['global_character_appearances', 'global_character_appearances'],
    ['global_locations', 'global_locations'],
    ['global_location_images', 'global_location_images'],
    ['global_voices', 'global_voices'],
    ['tasks', 'tasks'],
    ['task_events', 'task_events'],
  ]
  const counts: CountMap = {}
  await Promise.all(tables.map(async ([name, tableName]) => {
    // COUNT(*) may come back under a driver-specific column name/type, so
    // read the first value of the first row rather than addressing an alias.
    const rows = (await prisma.$queryRawUnsafe(
      `SELECT COUNT(*) AS c FROM \`${tableName}\``,
    )) as Array<Record<string, unknown>>
    const firstValue = Object.values(rows[0] || {})[0]
    const count = Number(firstValue || 0)
    counts[name] = Number.isFinite(count) ? count : 0
  }))
  return counts
}
function printDiff(expected: CountMap, actual: CountMap) {
  // Print a TSV table over the union of keys so rows present on only one
  // side still appear (missing side counts as 0). Returns true on any drift.
  const allKeys = new Set([...Object.keys(expected), ...Object.keys(actual)])
  let drift = false
  _ulogInfo('table\texpected\tactual\tdelta')
  for (const key of [...allKeys].sort()) {
    const expectedCount = expected[key] ?? 0
    const actualCount = actual[key] ?? 0
    const delta = actualCount - expectedCount
    if (delta !== 0) drift = true
    _ulogInfo(`${key}\t${expectedCount}\t${actualCount}\t${delta >= 0 ? '+' : ''}${delta}`)
  }
  return drift
}
async function main() {
  // Compare the live table counts against the latest (or --backup=<dir>)
  // snapshot. Read-only: never writes, only reports.
  const explicit = process.argv.find((arg) => arg.startsWith('--backup='))
  const backupDir = explicit ? path.resolve(explicit.split('=')[1]) : await findLatestBackupDir()
  _ulogInfo(`[media-restore-dry-run] backupDir=${backupDir}`)
  const expected = await readExpectedCounts(backupDir)
  const actual = await currentCounts()
  const hasDiff = printDiff(expected, actual)
  if (hasDiff) {
    // Exit code 2 distinguishes "drift detected" from hard failures (exit 1).
    _ulogInfo('[media-restore-dry-run] drift detected (dry-run only, no writes executed).')
    process.exitCode = 2
    return
  }
  _ulogInfo('[media-restore-dry-run] ok: counts match expected snapshot.')
}
main()
  .catch((error) => {
    // Log and set the exit code instead of throwing so finally still runs.
    _ulogError('[media-restore-dry-run] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the Prisma connection pool before the process exits.
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,247 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { createHash } from 'node:crypto'
import { promises as fs } from 'node:fs'
import path from 'node:path'
import COS from 'cos-nodejs-sdk-v5'
import { prisma } from '@/lib/prisma'
// One table to snapshot: `name` is the JSON file base name, `tableName` the
// physical table queried.
type SnapshotTask = {
  name: string
  tableName: string
}
// One stored object (local file or COS object) in the storage index.
type StorageIndexRow = {
  key: string
  hash: string | null
  sizeBytes: number
  lastModified: string | null
}
// Shape of one cos.getBucket() response page; the SDK returns loosely typed
// string-or-boolean/number fields depending on version.
type CosBucketPage = {
  Contents?: Array<{
    Key: string
    ETag?: string
    Size?: string | number
    LastModified?: string
  }>
  IsTruncated?: string | boolean
  NextMarker?: string
}
// All safety backups live under <repo>/data/migration-backups/<timestamp>/.
const BACKUP_ROOT = path.join(process.cwd(), 'data', 'migration-backups')
function nowStamp() {
  // ISO timestamp with ':' and '.' replaced by '-' so it is a filesystem-safe
  // directory name that still sorts chronologically.
  const iso = new Date().toISOString()
  return iso.replace(/[:.]/g, '-')
}
function toJson(value: unknown) {
  // Pretty-print with 2-space indent. BigInt has no JSON representation, so
  // it is serialized as a decimal string.
  const bigintSafe = (_key: string, val: unknown) =>
    typeof val === 'bigint' ? String(val) : val
  return JSON.stringify(value, bigintSafe, 2)
}
async function writeJson(filePath: string, data: unknown) {
  // Serialize through toJson so BigInt-bearing DB rows do not throw.
  const text = toJson(data)
  await fs.writeFile(filePath, text, 'utf8')
}
function sha256Text(input: string) {
  // Hex-encoded SHA-256 digest of the UTF-8 bytes of `input`.
  const hasher = createHash('sha256')
  hasher.update(input)
  return hasher.digest('hex')
}
function resolveDatabaseFilePath(databaseUrl: string | undefined): string | null {
  // Resolve a Prisma SQLite `file:` URL to an absolute filesystem path.
  // Returns null for missing/non-file URLs (mysql://, postgres://, ...) since
  // those have no local DB file to copy.
  if (!databaseUrl) return null
  if (!databaseUrl.startsWith('file:')) return null
  let raw = databaseUrl.slice('file:'.length)
  // Prisma allows query parameters on SQLite URLs (e.g.
  // file:./dev.db?connection_limit=1); they are not part of the on-disk path,
  // so strip them before resolving. The previous version kept them and would
  // point at a nonexistent file.
  const queryIndex = raw.indexOf('?')
  if (queryIndex !== -1) raw = raw.slice(0, queryIndex)
  if (!raw) return null
  return path.isAbsolute(raw) ? raw : path.join(process.cwd(), raw)
}
async function listLocalFilesRecursively(rootDir: string, prefix = ''): Promise<StorageIndexRow[]> {
  // Walk rootDir depth-first and return one index row per regular file.
  // `prefix` is the rootDir-relative path accumulated through the recursion.
  const fullDir = path.join(rootDir, prefix)
  const entries = await fs.readdir(fullDir, { withFileTypes: true })
  const out: StorageIndexRow[] = []
  for (const entry of entries) {
    const rel = path.join(prefix, entry.name)
    if (entry.isDirectory()) {
      out.push(...(await listLocalFilesRecursively(rootDir, rel)))
      continue
    }
    // Skip anything that is not a regular file (symlinks, sockets, ...).
    if (!entry.isFile()) continue
    const filePath = path.join(rootDir, rel)
    const stat = await fs.stat(filePath)
    // NOTE(review): each file is read fully into memory to hash it — assumes
    // upload files are reasonably small; confirm for large media assets.
    const buf = await fs.readFile(filePath)
    out.push({
      // Normalize separators to '/' so keys match COS-style object keys.
      key: rel.split(path.sep).join('/'),
      hash: createHash('sha256').update(buf).digest('hex'),
      sizeBytes: stat.size,
      lastModified: stat.mtime.toISOString(),
    })
  }
  return out
}
async function listCosObjects(): Promise<StorageIndexRow[]> {
  // Enumerate every object in the configured COS bucket via paged getBucket
  // calls. All four env vars are required; fail fast when any is missing.
  const secretId = process.env.COS_SECRET_ID
  const secretKey = process.env.COS_SECRET_KEY
  const bucket = process.env.COS_BUCKET
  const region = process.env.COS_REGION
  if (!secretId || !secretKey || !bucket || !region) {
    throw new Error('Missing COS env: COS_SECRET_ID/COS_SECRET_KEY/COS_BUCKET/COS_REGION')
  }
  const cos = new COS({ SecretId: secretId, SecretKey: secretKey, Timeout: 60_000 })
  const out: StorageIndexRow[] = []
  let marker = ''
  while (true) {
    // The SDK is callback-based; wrap each page request in a Promise.
    const page = await new Promise<CosBucketPage>((resolve, reject) => {
      cos.getBucket(
        {
          Bucket: bucket,
          Region: region,
          Marker: marker,
          MaxKeys: 1000,
        },
        (err, data) => (err ? reject(err) : resolve((data || {}) as CosBucketPage)),
      )
    })
    const contents = page.Contents || []
    for (const item of contents) {
      out.push({
        key: item.Key,
        // ETag arrives wrapped in double quotes; strip them. NOTE(review):
        // for multipart uploads the ETag is not a plain content hash —
        // confirm how index consumers compare it against local sha256 values.
        hash: item.ETag ? String(item.ETag).replaceAll('"', '') : null,
        sizeBytes: Number(item.Size || 0),
        lastModified: item.LastModified || null,
      })
    }
    // IsTruncated may be a string ('true'/'false') or a boolean depending on
    // the SDK version; normalize via String() before comparing.
    const truncated = String(page.IsTruncated || 'false') === 'true'
    if (!truncated) break
    // Continue from NextMarker; fall back to the last key of this page, and
    // stop entirely if neither is available (defends against infinite loops).
    marker = page.NextMarker || (contents.length ? contents[contents.length - 1].Key : '')
    if (!marker) break
  }
  return out
}
async function buildStorageIndex(): Promise<{ storageType: string; rows: StorageIndexRow[] }> {
  // Index whichever storage backend is configured: local disk when
  // STORAGE_TYPE=local, otherwise COS (the default).
  const storageType = process.env.STORAGE_TYPE || 'cos'
  if (storageType === 'local') {
    const uploadDir = process.env.UPLOAD_DIR || './data/uploads'
    const rootDir = path.isAbsolute(uploadDir) ? uploadDir : path.join(process.cwd(), uploadDir)
    const exists = await fs.stat(rootDir).then(() => true).catch(() => false)
    // A missing upload dir is treated as an empty index, not an error.
    if (!exists) {
      return { storageType, rows: [] }
    }
    const rows = await listLocalFilesRecursively(rootDir)
    return { storageType, rows }
  }
  // Any non-'local' value falls through to COS listing.
  const rows = await listCosObjects()
  return { storageType, rows }
}
async function snapshotTables(backupDir: string) {
  // Dump every row of each listed table to <backupDir>/<name>.json and return
  // the per-table row counts (recorded in metadata.json).
  const tasks: SnapshotTask[] = [
    { name: 'projects', tableName: 'projects' },
    { name: 'novel_promotion_projects', tableName: 'novel_promotion_projects' },
    { name: 'novel_promotion_episodes', tableName: 'novel_promotion_episodes' },
    { name: 'novel_promotion_panels', tableName: 'novel_promotion_panels' },
    { name: 'novel_promotion_voice_lines', tableName: 'novel_promotion_voice_lines' },
    { name: 'global_characters', tableName: 'global_characters' },
    { name: 'global_character_appearances', tableName: 'global_character_appearances' },
    { name: 'global_locations', tableName: 'global_locations' },
    { name: 'global_location_images', tableName: 'global_location_images' },
    { name: 'global_voices', tableName: 'global_voices' },
    { name: 'tasks', tableName: 'tasks' },
    { name: 'task_events', tableName: 'task_events' },
  ]
  const counts: Record<string, number> = {}
  for (const task of tasks) {
    // NOTE(review): full-table SELECT loads all rows into memory — fine at
    // current volume; revisit if any table grows large.
    const rows = (await prisma.$queryRawUnsafe(`SELECT * FROM \`${task.tableName}\``)) as unknown[]
    counts[task.name] = rows.length
    await writeJson(path.join(backupDir, `${task.name}.json`), rows)
  }
  return counts
}
async function writeChecksums(backupDir: string) {
  // Compute a sha256 per regular file already present in backupDir and write
  // the map to checksums.json. Files written after this call (metadata.json)
  // and checksums.json itself are therefore not covered by this map.
  const files = (await fs.readdir(backupDir)).sort()
  const sums: Record<string, string> = {}
  for (const file of files) {
    const filePath = path.join(backupDir, file)
    const stat = await fs.stat(filePath)
    if (!stat.isFile()) continue
    const buf = await fs.readFile(filePath)
    sums[file] = createHash('sha256').update(buf).digest('hex')
  }
  await writeJson(path.join(backupDir, 'checksums.json'), sums)
}
async function backupDbFile(backupDir: string) {
  // Copy the local SQLite database file into the backup dir when DATABASE_URL
  // is a file: URL. Returns the copied file's base name, or null when there
  // is no local DB file (e.g. a server database) or it is unreadable.
  const dbFile = resolveDatabaseFilePath(process.env.DATABASE_URL)
  if (!dbFile) return null
  const stat = await fs.stat(dbFile).catch(() => null)
  if (!stat || !stat.isFile()) return null
  const fileName = path.basename(dbFile)
  const target = path.join(backupDir, `db-file-${fileName}`)
  await fs.copyFile(dbFile, target)
  return path.basename(target)
}
async function main() {
  // Create a timestamped backup dir, snapshot the DB file (if any), all table
  // rows, and the storage object index, then record checksums + metadata.
  const stamp = nowStamp()
  const backupDir = path.join(BACKUP_ROOT, stamp)
  await fs.mkdir(backupDir, { recursive: true })
  const meta: Record<string, unknown> = {
    createdAt: new Date().toISOString(),
    backupDir,
    databaseUrl: process.env.DATABASE_URL || null,
    storageType: process.env.STORAGE_TYPE || 'cos',
    nodeEnv: process.env.NODE_ENV || null,
  }
  const copiedDbFile = await backupDbFile(backupDir)
  meta.copiedDbFile = copiedDbFile
  const tableCounts = await snapshotTables(backupDir)
  meta.tableCounts = tableCounts
  const storage = await buildStorageIndex()
  meta.storageType = storage.storageType
  meta.storageObjectCount = storage.rows.length
  await writeJson(path.join(backupDir, 'storage-object-index.json'), storage.rows)
  // checksums.json covers everything written so far; metadata.json itself is
  // written afterwards and carries its own checksum field instead.
  await writeChecksums(backupDir)
  // Hash of the metadata object as it stood before this field was added.
  meta.metadataChecksum = sha256Text(toJson(meta))
  await writeJson(path.join(backupDir, 'metadata.json'), meta)
  _ulogInfo(`[media-safety-backup] done: ${backupDir}`)
  _ulogInfo(`[media-safety-backup] tableCounts=${JSON.stringify(tableCounts)}`)
  _ulogInfo(`[media-safety-backup] storageObjects=${storage.rows.length}`)
}
main()
  .catch((error) => {
    // Log and set the exit code instead of throwing so finally still runs.
    _ulogError('[media-safety-backup] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the Prisma connection pool before the process exits.
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,72 @@
import { prisma } from '@/lib/prisma'
// Status / event-type renames performed by this migration.
const OLD_STATUS = 'cancelled'
const NEW_STATUS = 'failed'
const OLD_EVENT_TYPE = 'task.cancelled'
const NEW_EVENT_TYPE = 'task.failed'
// Error metadata stamped onto migrated tasks ("the user stopped the task").
const MIGRATION_ERROR_CODE = 'USER_CANCELLED'
const MIGRATION_ERROR_MESSAGE = '用户已停止任务。'
// Plain stdout/stderr helpers; this script intentionally avoids the app logger.
function log(message: string) {
  process.stdout.write(`${message}\n`)
}
function logError(message: string) {
  process.stderr.write(`${message}\n`)
}
async function main() {
  // Count matches first so a no-op run exits before issuing any UPDATEs.
  const totalTasks = await prisma.task.count({
    where: { status: OLD_STATUS },
  })
  const totalEvents = await prisma.taskEvent.count({
    where: { eventType: OLD_EVENT_TYPE },
  })
  log(`[migrate-cancelled-to-failed] matched tasks: ${totalTasks}`)
  log(`[migrate-cancelled-to-failed] matched events: ${totalEvents}`)
  if (totalTasks === 0 && totalEvents === 0) {
    log('[migrate-cancelled-to-failed] no rows to migrate')
    return
  }
  // Pass 1: cancelled tasks WITHOUT an error message get both the error code
  // and a human-readable message.
  const taskEmptyMessageResult = await prisma.task.updateMany({
    where: {
      status: OLD_STATUS,
      OR: [{ errorMessage: null }, { errorMessage: '' }],
    },
    data: {
      status: NEW_STATUS,
      errorCode: MIGRATION_ERROR_CODE,
      errorMessage: MIGRATION_ERROR_MESSAGE,
    },
  })
  // Pass 2: the remaining cancelled tasks keep their existing message and
  // only receive the error code. Rows updated in pass 1 no longer match
  // OLD_STATUS, so no row is touched twice — pass order is load-bearing.
  const taskResult = await prisma.task.updateMany({
    where: { status: OLD_STATUS },
    data: {
      status: NEW_STATUS,
      errorCode: MIGRATION_ERROR_CODE,
    },
  })
  // Rename the corresponding task events in one sweep.
  const eventResult = await prisma.taskEvent.updateMany({
    where: { eventType: OLD_EVENT_TYPE },
    data: {
      eventType: NEW_EVENT_TYPE,
    },
  })
  log(`[migrate-cancelled-to-failed] updated tasks (empty message): ${taskEmptyMessageResult.count}`)
  log(`[migrate-cancelled-to-failed] updated tasks (remaining): ${taskResult.count}`)
  log(`[migrate-cancelled-to-failed] updated events: ${eventResult.count}`)
}
main()
  .catch((error) => {
    // Prefer the stack for diagnosability; fall back to message/String().
    logError(`[migrate-cancelled-to-failed] failed: ${error instanceof Error ? error.stack || error.message : String(error)}`)
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the Prisma connection pool before the process exits.
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,231 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { encodeImageUrls } from '@/lib/contracts/image-urls-contract'
// Row shape shared by both appearance tables.
type AppearanceRow = {
  id: string
  imageUrls: string | null
  previousImageUrls: string | null
}
// Minimal surface of a Prisma model delegate, looked up dynamically by name.
type DynamicModel = {
  findMany: (args: unknown) => Promise<AppearanceRow[]>
  update: (args: unknown) => Promise<unknown>
}
type FieldName = 'imageUrls' | 'previousImageUrls'
// Outcome of normalizing one JSON column value; `reason` records why (or
// whether) the stored text changed.
type NormalizeResult = {
  next: string
  changed: boolean
  reason: 'ok' | 'null' | 'invalid_json' | 'not_array' | 'filtered_non_string' | 'normalized_json'
}
// Per-model counters reported at the end of the run.
type ModelStats = {
  scanned: number
  updatedRows: number
  changedFields: number
  reasons: Record<string, number>
}
// Rows fetched per page during cursor pagination.
const BATCH_SIZE = 200
// Without --apply the script is a dry run: it reports but never writes.
const APPLY = process.argv.includes('--apply')
const MODELS: Array<{ name: string; model: string }> = [
  { name: 'CharacterAppearance', model: 'characterAppearance' },
  { name: 'GlobalCharacterAppearance', model: 'globalCharacterAppearance' },
]
// Dynamic view of the Prisma client so models can be addressed by string key.
const prismaDynamic = prisma as unknown as Record<string, DynamicModel>
function print(message: string) {
  // One line per message, written directly to stdout.
  process.stdout.write(message + '\n')
}
function normalizeField(raw: string | null): NormalizeResult {
  // Normalize one stored JSON column into the canonical encodeImageUrls form.
  // NULL column -> canonical empty list.
  if (raw === null) {
    return { next: encodeImageUrls([]), changed: true, reason: 'null' }
  }
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    // Unparseable JSON is reset to the canonical empty list.
    return { next: encodeImageUrls([]), changed: true, reason: 'invalid_json' }
  }
  // Anything that is not an array is also reset.
  if (!Array.isArray(parsed)) {
    return { next: encodeImageUrls([]), changed: true, reason: 'not_array' }
  }
  // Keep string entries only; re-encode through the contract helper.
  const strings = parsed.filter((item): item is string => typeof item === 'string')
  const next = encodeImageUrls(strings)
  if (strings.length !== parsed.length) {
    return { next, changed: true, reason: 'filtered_non_string' }
  }
  // Same content but different serialization (whitespace, ordering of bytes).
  if (next !== raw) {
    return { next, changed: true, reason: 'normalized_json' }
  }
  return { next, changed: false, reason: 'ok' }
}
async function migrateModel(modelName: string, modelKey: string) {
  // Normalize imageUrls/previousImageUrls for every row of one model using
  // id-cursor pagination. Writes happen only when APPLY is set; a dry run
  // still produces full stats and sample diffs.
  const model = prismaDynamic[modelKey]
  if (!model) {
    throw new Error(`Prisma model not found: ${modelKey}`)
  }
  const stats: ModelStats = {
    scanned: 0,
    updatedRows: 0,
    changedFields: 0,
    reasons: {
      ok: 0,
      null: 0,
      invalid_json: 0,
      not_array: 0,
      filtered_non_string: 0,
      normalized_json: 0,
    },
  }
  // Up to 20 before/after examples are kept for the report.
  const samples: Array<{ id: string; field: FieldName; reason: NormalizeResult['reason']; before: string | null; after: string }> = []
  let cursor: string | null = null
  while (true) {
    const rows = await model.findMany({
      select: {
        id: true,
        imageUrls: true,
        previousImageUrls: true,
      },
      // Cursor pagination: skip the cursor row itself on subsequent pages.
      ...(cursor
        ? {
            cursor: { id: cursor },
            skip: 1,
          }
        : {}),
      orderBy: { id: 'asc' },
      take: BATCH_SIZE,
    })
    if (rows.length === 0) break
    for (const row of rows) {
      stats.scanned += 1
      const imageUrlsResult = normalizeField(row.imageUrls)
      const previousImageUrlsResult = normalizeField(row.previousImageUrls)
      // Both fields are counted, so reason totals sum to 2x the scanned rows.
      stats.reasons[imageUrlsResult.reason] += 1
      stats.reasons[previousImageUrlsResult.reason] += 1
      const data: Partial<Record<FieldName, string>> = {}
      if (imageUrlsResult.changed) {
        data.imageUrls = imageUrlsResult.next
        stats.changedFields += 1
        if (samples.length < 20) {
          samples.push({
            id: row.id,
            field: 'imageUrls',
            reason: imageUrlsResult.reason,
            before: row.imageUrls,
            after: imageUrlsResult.next,
          })
        }
      }
      if (previousImageUrlsResult.changed) {
        data.previousImageUrls = previousImageUrlsResult.next
        stats.changedFields += 1
        if (samples.length < 20) {
          samples.push({
            id: row.id,
            field: 'previousImageUrls',
            reason: previousImageUrlsResult.reason,
            before: row.previousImageUrls,
            after: previousImageUrlsResult.next,
          })
        }
      }
      if (Object.keys(data).length > 0) {
        stats.updatedRows += 1
        // Dry-run counts the row but skips the write.
        if (APPLY) {
          await model.update({
            where: { id: row.id },
            data,
          })
        }
      }
    }
    cursor = rows[rows.length - 1]?.id || null
  }
  const summary = `[migrate-image-urls-contract] ${modelName}: scanned=${stats.scanned} updatedRows=${stats.updatedRows} changedFields=${stats.changedFields}`
  // Summary goes to both the app logger and stdout.
  _ulogInfo(summary)
  print(summary)
  print(`[migrate-image-urls-contract] ${modelName}: reasons=${JSON.stringify(stats.reasons)}`)
  if (samples.length > 0) {
    print(`[migrate-image-urls-contract] ${modelName}: sampleChanges=${JSON.stringify(samples, null, 2)}`)
  }
  return stats
}
async function main() {
  // Run the normalization over each configured model and report aggregates.
  print(`[migrate-image-urls-contract] mode=${APPLY ? 'apply' : 'dry-run'}`)
  const totals = {
    scanned: 0,
    updatedRows: 0,
    changedFields: 0,
  }
  for (const target of MODELS) {
    const stats = await migrateModel(target.name, target.model)
    totals.scanned += stats.scanned
    totals.updatedRows += stats.updatedRows
    totals.changedFields += stats.changedFields
  }
  print(`[migrate-image-urls-contract] done scanned=${totals.scanned} updatedRows=${totals.updatedRows} changedFields=${totals.changedFields} mode=${APPLY ? 'apply' : 'dry-run'}`)
}
main()
  .catch((error) => {
    // Log and set the exit code instead of throwing so finally still runs.
    _ulogError('[migrate-image-urls-contract] failed:', error)
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the Prisma connection pool before the process exits.
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,310 @@
import { prisma } from '@/lib/prisma'
import {
parseModelKeyStrict,
type CapabilitySelections,
type CapabilityValue,
} from '@/lib/model-config-contract'
import { findBuiltinCapabilities } from '@/lib/model-capabilities/catalog'
// Without --apply the script is a dry run: it reports but never writes.
const APPLY = process.argv.includes('--apply')
// Image-model columns whose legacy per-row resolution is migrated into
// capability selections (user-level defaults).
const USER_IMAGE_MODEL_FIELDS = [
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
] as const
// Same set for project-level overrides.
const PROJECT_IMAGE_MODEL_FIELDS = [
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
] as const
type UserImageModelField = typeof USER_IMAGE_MODEL_FIELDS[number]
type ProjectImageModelField = typeof PROJECT_IMAGE_MODEL_FIELDS[number]
// userPreference row as selected by the migration query.
interface UserPreferenceRow {
  id: string
  userId: string
  imageResolution: string
  capabilityDefaults: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
}
// novelPromotionProject row as selected by the migration query.
interface ProjectRow {
  id: string
  projectId: string
  imageResolution: string
  videoResolution: string
  capabilityOverrides: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
}
// Aggregate counters printed as JSON at the end of the run.
interface MigrationSummary {
  mode: 'dry-run' | 'apply'
  userPreference: {
    scanned: number
    updated: number
    migratedImageResolution: number
  }
  novelPromotionProject: {
    scanned: number
    updated: number
    migratedImageResolution: number
    migratedVideoResolution: number
  }
}
function isRecord(value: unknown): value is Record<string, unknown> {
  // Plain-object check: excludes null, primitives, and arrays.
  if (value === null || typeof value !== 'object') return false
  return !Array.isArray(value)
}
function isCapabilityValue(value: unknown): value is CapabilityValue {
  // Capability values are restricted to JSON scalars.
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'boolean':
      return true
    default:
      return false
  }
}
function normalizeSelections(raw: unknown): CapabilitySelections {
  // Keep only { modelKey: { field: scalar } } entries; everything else is
  // silently dropped so corrupt stored JSON never propagates.
  if (!isRecord(raw)) return {}
  const result: CapabilitySelections = {}
  for (const [modelKey, selection] of Object.entries(raw)) {
    if (!isRecord(selection)) continue
    const cleaned: Record<string, CapabilityValue> = {}
    for (const [field, value] of Object.entries(selection)) {
      if (!isCapabilityValue(value)) continue
      cleaned[field] = value
    }
    result[modelKey] = cleaned
  }
  return result
}
function parseSelections(raw: string | null): CapabilitySelections {
  // Treat NULL/empty/corrupt JSON uniformly as "no selections".
  if (!raw) return {}
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return {}
  }
  return normalizeSelections(parsed)
}
function serializeSelections(selections: CapabilitySelections): string | null {
  // Persist NULL rather than '{}' when nothing is selected.
  const hasEntries = Object.keys(selections).length > 0
  return hasEntries ? JSON.stringify(selections) : null
}
function getCapabilityResolutionOptions(
  modelType: 'image' | 'video',
  modelKey: string,
): string[] {
  // Look up this model's built-in capability entry and return its
  // resolutionOptions (non-empty strings only). Any lookup failure —
  // unparseable key, unknown model, malformed capabilities — yields [].
  const parsed = parseModelKeyStrict(modelKey)
  if (!parsed) return []
  const capabilities = findBuiltinCapabilities(modelType, parsed.provider, parsed.modelId)
  const namespace = capabilities?.[modelType]
  if (!namespace || !isRecord(namespace)) return []
  const resolutionOptions = namespace.resolutionOptions
  if (!Array.isArray(resolutionOptions)) return []
  return resolutionOptions.filter((item): item is string => typeof item === 'string' && item.trim().length > 0)
}
function ensureModelResolutionSelection(input: {
  modelType: 'image' | 'video'
  modelKey: string
  resolution: string
  selections: CapabilitySelections
}): boolean {
  // Seed selections[modelKey].resolution from a legacy per-row resolution
  // column. Mutates input.selections in place; returns true iff a value was
  // written. Skips models with no resolution options, values the model does
  // not support, and selections that already carry a resolution.
  const options = getCapabilityResolutionOptions(input.modelType, input.modelKey)
  if (options.length === 0) return false
  if (!options.includes(input.resolution)) return false
  const current = input.selections[input.modelKey]
  if (current && current.resolution !== undefined) {
    return false
  }
  input.selections[input.modelKey] = {
    ...(current || {}),
    resolution: input.resolution,
  }
  return true
}
function collectModelKeys<RowType>(
  row: RowType,
  fields: readonly (keyof RowType)[],
): string[] {
  // Collect the non-empty, trimmed string values of the named fields,
  // preserving field order. Non-string and blank values are skipped.
  const keys: string[] = []
  for (const field of fields) {
    const raw = row[field]
    if (typeof raw !== 'string') continue
    const candidate = raw.trim()
    if (candidate) keys.push(candidate)
  }
  return keys
}
async function migrateUserPreferences(summary: MigrationSummary) {
  // For every user preference row, copy the legacy imageResolution column
  // into capabilityDefaults for each configured image model. Mutates
  // `summary` counters in place; writes only when APPLY is set.
  const rows = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      imageResolution: true,
      capabilityDefaults: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
    },
  }) as UserPreferenceRow[]
  summary.userPreference.scanned = rows.length
  for (const row of rows) {
    const nextSelections = parseSelections(row.capabilityDefaults)
    const modelKeys = collectModelKeys<UserPreferenceRow>(row, USER_IMAGE_MODEL_FIELDS)
    let changed = false
    for (const modelKey of modelKeys) {
      // Only counts as a migration when a resolution was actually seeded
      // (existing selections are never overwritten).
      if (ensureModelResolutionSelection({
        modelType: 'image',
        modelKey,
        resolution: row.imageResolution,
        selections: nextSelections,
      })) {
        changed = true
        summary.userPreference.migratedImageResolution += 1
      }
    }
    if (!changed) continue
    summary.userPreference.updated += 1
    if (APPLY) {
      await prisma.userPreference.update({
        where: { id: row.id },
        data: {
          capabilityDefaults: serializeSelections(nextSelections),
        },
      })
    }
  }
}
async function migrateProjects(summary: MigrationSummary) {
  // Project-level variant of migrateUserPreferences: seeds both the legacy
  // imageResolution (per image model) and videoResolution (for videoModel)
  // into capabilityOverrides. Mutates `summary`; writes only when APPLY.
  const rows = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      projectId: true,
      imageResolution: true,
      videoResolution: true,
      capabilityOverrides: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  }) as ProjectRow[]
  summary.novelPromotionProject.scanned = rows.length
  for (const row of rows) {
    const nextSelections = parseSelections(row.capabilityOverrides)
    const imageModelKeys = collectModelKeys<ProjectRow>(row, PROJECT_IMAGE_MODEL_FIELDS)
    let changed = false
    for (const modelKey of imageModelKeys) {
      if (ensureModelResolutionSelection({
        modelType: 'image',
        modelKey,
        resolution: row.imageResolution,
        selections: nextSelections,
      })) {
        changed = true
        summary.novelPromotionProject.migratedImageResolution += 1
      }
    }
    // The video model is a single optional column, handled separately.
    if (typeof row.videoModel === 'string' && row.videoModel.trim()) {
      if (ensureModelResolutionSelection({
        modelType: 'video',
        modelKey: row.videoModel.trim(),
        resolution: row.videoResolution,
        selections: nextSelections,
      })) {
        changed = true
        summary.novelPromotionProject.migratedVideoResolution += 1
      }
    }
    if (!changed) continue
    summary.novelPromotionProject.updated += 1
    if (APPLY) {
      await prisma.novelPromotionProject.update({
        where: { id: row.id },
        data: {
          capabilityOverrides: serializeSelections(nextSelections),
        },
      })
    }
  }
}
async function main() {
  // Run both migrations and print the aggregate summary as JSON.
  const summary: MigrationSummary = {
    mode: APPLY ? 'apply' : 'dry-run',
    userPreference: {
      scanned: 0,
      updated: 0,
      migratedImageResolution: 0,
    },
    novelPromotionProject: {
      scanned: 0,
      updated: 0,
      migratedImageResolution: 0,
      migratedVideoResolution: 0,
    },
  }
  await migrateUserPreferences(summary)
  await migrateProjects(summary)
  process.stdout.write(`${JSON.stringify(summary, null, 2)}\n`)
}
main()
  .catch((error: unknown) => {
    const message = error instanceof Error ? error.message : String(error)
    // Heuristic: a failure mentioning the new columns most likely means the
    // schema migration has not been applied yet — print an actionable hint.
    const missingColumn =
      message.includes('capabilityDefaults') || message.includes('capabilityOverrides')
    if (missingColumn && message.includes('does not exist')) {
      process.stderr.write(
        '[migrate-capability-selections] FAILED: required DB columns are missing. ' +
        'Apply SQL migration `prisma/migrations/20260215_add_capability_selection_columns.sql` first.\n',
      )
    } else {
      process.stderr.write(`[migrate-capability-selections] FAILED: ${message}\n`)
    }
    process.exitCode = 1
  })
  .finally(async () => {
    // Always release the Prisma connection pool before the process exits.
    await prisma.$disconnect()
  })

View File

@@ -0,0 +1,498 @@
import fs from 'fs'
import path from 'path'
import { prisma } from '@/lib/prisma'
import {
composeModelKey,
parseModelKeyStrict,
validateModelCapabilities,
type ModelCapabilities,
type UnifiedModelType,
} from '@/lib/model-config-contract'
// Model-selection columns shared by userPreference and novelPromotionProject.
type ModelField =
  | 'analysisModel'
  | 'characterModel'
  | 'locationModel'
  | 'storyboardModel'
  | 'editModel'
  | 'videoModel'
// userPreference row as selected by the migration query.
type PreferenceRow = {
  id: string
  userId: string
  customModels: string | null
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
}
// novelPromotionProject row plus the owning user (for issue attribution).
type ProjectRow = {
  id: string
  projectId: string
  analysisModel: string | null
  characterModel: string | null
  locationModel: string | null
  storyboardModel: string | null
  editModel: string | null
  videoModel: string | null
  project: {
    userId: string
  }
}
// One problem found during migration; collected into the JSON report.
type MigrationIssue = {
  table: 'userPreference' | 'novelPromotionProject'
  rowId: string
  userId?: string
  field: string
  kind:
    | 'CUSTOM_MODELS_JSON_INVALID'
    | 'MODEL_SHAPE_INVALID'
    | 'MODEL_TYPE_INVALID'
    | 'MODEL_KEY_INCOMPLETE'
    | 'MODEL_KEY_MISMATCH'
    | 'MODEL_CAPABILITY_INVALID'
    | 'LEGACY_MODEL_ID_NOT_FOUND'
    | 'LEGACY_MODEL_ID_AMBIGUOUS'
  rawValue?: string | null
  candidates?: string[]
  message: string
}
// Written to the --report path as JSON at the end of the run.
type MigrationReport = {
  generatedAt: string
  mode: 'dry-run' | 'apply'
  userPreference: {
    scanned: number
    updated: number
    updatedCustomModels: number
    updatedDefaultFields: number
  }
  novelPromotionProject: {
    scanned: number
    updated: number
    updatedFields: number
  }
  issues: MigrationIssue[]
}
// Canonical shape a customModels entry is rewritten into.
type NormalizedModel = {
  provider: string
  modelId: string
  modelKey: string
  name: string
  type: UnifiedModelType
  price: number
  resolution?: '2K' | '4K'
  capabilities?: ModelCapabilities
}
// Without --apply the script is a dry run: it reports but never writes.
const APPLY = process.argv.includes('--apply')
// Cap on issues kept in the report so it stays readable.
const MAX_ISSUES = 500
const MODEL_FIELDS: readonly ModelField[] = [
  'analysisModel',
  'characterModel',
  'locationModel',
  'storyboardModel',
  'editModel',
  'videoModel',
]
// Static mapping from bare legacy model ids to provider-qualified model keys,
// used when a value cannot be resolved via the user's own custom models.
const LEGACY_MODEL_ID_MAP = new Map<string, string>([
  ['anthropic/claude-sonnet-4.5', 'openrouter::anthropic/claude-sonnet-4.5'],
  ['google/gemini-3-pro-preview', 'openrouter::google/gemini-3-pro-preview'],
  ['openai/gpt-5.2', 'openrouter::openai/gpt-5.2'],
  ['banana', 'fal::banana'],
  ['banana-2k', 'fal::banana'],
  ['seedream', 'ark::doubao-seedream-4-0-250828'],
  ['seedream4.5', 'ark::doubao-seedream-4-5-251128'],
  ['gemini-3-pro-image-preview', 'google::gemini-3-pro-image-preview'],
  ['gemini-3-pro-image-preview-batch', 'google::gemini-3-pro-image-preview-batch'],
  ['nano-banana-pro', 'google::gemini-3-pro-image-preview'],
  ['gemini-3.0-pro-image-portrait', 'flow2api::gemini-3.0-pro-image-portrait'],
  ['imagen-4.0-ultra-generate-001', 'google::imagen-4.0-ultra-generate-001'],
  ['doubao-seedance-1-0-pro-250528', 'ark::doubao-seedance-1-0-pro-250528'],
  ['doubao-seedance-1-0-pro-fast-251015', 'ark::doubao-seedance-1-0-pro-fast-251015'],
  ['doubao-seedance-1-0-pro-fast-251015-batch', 'ark::doubao-seedance-1-0-pro-fast-251015-batch'],
])
function parseReportPathArg(): string {
  // Accept --report=path (preferred) or --report path; fall back to the
  // default report location.
  const flagPrefix = '--report='
  const inlineArg = process.argv.find((arg) => arg.startsWith(flagPrefix))
  if (inlineArg) return inlineArg.slice(flagPrefix.length)
  const separateIndex = process.argv.indexOf('--report')
  if (separateIndex !== -1 && process.argv[separateIndex + 1]) {
    return process.argv[separateIndex + 1]
  }
  return 'scripts/migrations/reports/model-config-migration-report.json'
}
function isRecord(value: unknown): value is Record<string, unknown> {
  // True for plain objects only (not null, not arrays, not primitives).
  return typeof value === 'object' && value !== null && !Array.isArray(value)
}
function toTrimmedString(value: unknown): string {
  // Non-strings collapse to '' so callers treat "missing" and "blank" alike.
  if (typeof value !== 'string') return ''
  return value.trim()
}
function isUnifiedModelType(value: unknown): value is UnifiedModelType {
  // The five model categories the unified config contract recognizes.
  switch (value) {
    case 'llm':
    case 'image':
    case 'video':
    case 'audio':
    case 'lipsync':
      return true
    default:
      return false
  }
}
function stableStringify(value: unknown): string {
  // Deterministic JSON: object keys are emitted in sorted order at every
  // depth, so two structurally-equal values always serialize identically.
  // The previous implementation was a plain JSON.stringify, which preserves
  // insertion order — that made normalizeModel's changed-detection report a
  // spurious `changed` when a stored model only differed in key order.
  // NOTE(review): does not honor toJSON (e.g. Date); callers only pass
  // parsed-JSON records and plain normalized objects, where this is moot.
  if (value === null || typeof value !== 'object') {
    // Primitives keep JSON.stringify semantics (including the quirk that
    // undefined has no JSON representation, matching the old behavior).
    return JSON.stringify(value)
  }
  if (Array.isArray(value)) {
    // JSON encodes undefined/function/symbol array items as null.
    const items = value.map((item) =>
      item === undefined || typeof item === 'function' || typeof item === 'symbol'
        ? 'null'
        : stableStringify(item),
    )
    return `[${items.join(',')}]`
  }
  const record = value as Record<string, unknown>
  const parts: string[] = []
  for (const key of Object.keys(record).sort()) {
    const entryValue = record[key]
    // JSON.stringify omits undefined/function/symbol-valued properties.
    if (entryValue === undefined || typeof entryValue === 'function' || typeof entryValue === 'symbol') continue
    parts.push(`${JSON.stringify(key)}:${stableStringify(entryValue)}`)
  }
  return `{${parts.join(',')}}`
}
function parseCustomModels(raw: string | null): { ok: true; value: unknown[] } | { ok: false } {
  // A NULL/empty column means "no custom models", not a parse failure.
  if (!raw) return { ok: true, value: [] }
  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    return { ok: false }
  }
  // Only a top-level JSON array is acceptable.
  return Array.isArray(parsed) ? { ok: true, value: parsed } : { ok: false }
}
function normalizeModel(
  raw: unknown,
): { normalized: NormalizedModel | null; changed: boolean; issue?: Omit<MigrationIssue, 'table' | 'rowId'> } {
  // Validate and canonicalize one customModels entry. On any validation
  // failure the entry is rejected (normalized: null) with a typed issue;
  // `changed` reports whether re-serializing the canonical form differs
  // from the stored form.
  if (!isRecord(raw)) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels',
        kind: 'MODEL_SHAPE_INVALID',
        message: 'customModels item must be object',
      },
    }
  }
  const modelType = raw.type
  if (!isUnifiedModelType(modelType)) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels.type',
        kind: 'MODEL_TYPE_INVALID',
        rawValue: String(raw.type ?? ''),
        message: 'custom model type must be llm/image/video/audio/lipsync',
      },
    }
  }
  // provider/modelId may come from explicit fields or be derived from a
  // stored modelKey; explicit fields win.
  const providerField = toTrimmedString(raw.provider)
  const modelIdField = toTrimmedString(raw.modelId)
  const parsedModelKey = parseModelKeyStrict(toTrimmedString(raw.modelKey))
  const provider = providerField || parsedModelKey?.provider || ''
  const modelId = modelIdField || parsedModelKey?.modelId || ''
  const modelKey = composeModelKey(provider, modelId)
  if (!modelKey) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels.modelKey',
        kind: 'MODEL_KEY_INCOMPLETE',
        rawValue: toTrimmedString(raw.modelKey),
        message: 'provider/modelId/modelKey cannot compose a valid model_key',
      },
    }
  }
  // A stored modelKey that disagrees with provider/modelId is rejected rather
  // than silently preferring one source.
  if (parsedModelKey && parsedModelKey.modelKey !== modelKey) {
    return {
      normalized: null,
      changed: false,
      issue: {
        field: 'customModels.modelKey',
        kind: 'MODEL_KEY_MISMATCH',
        rawValue: toTrimmedString(raw.modelKey),
        message: 'modelKey conflicts with provider/modelId',
      },
    }
  }
  // Only the two supported resolution literals survive; anything else drops.
  const rawResolution = toTrimmedString(raw.resolution)
  const resolution = rawResolution === '2K' || rawResolution === '4K' ? rawResolution : undefined
  const capabilities = isRecord(raw.capabilities)
    ? ({ ...(raw.capabilities as ModelCapabilities) })
    : undefined
  // Capabilities are validated against the contract; only the first issue is
  // surfaced per entry.
  const capabilityIssues = validateModelCapabilities(modelType, capabilities)
  if (capabilityIssues.length > 0) {
    const firstIssue = capabilityIssues[0]
    return {
      normalized: null,
      changed: false,
      issue: {
        field: firstIssue.field,
        kind: 'MODEL_CAPABILITY_INVALID',
        message: `${firstIssue.code}: ${firstIssue.message}`,
      },
    }
  }
  // Defaults: display name falls back to the modelId, price to 0.
  const name = toTrimmedString(raw.name) || modelId
  const price = typeof raw.price === 'number' && Number.isFinite(raw.price) ? raw.price : 0
  const normalized: NormalizedModel = {
    provider,
    modelId,
    modelKey,
    name,
    type: modelType,
    price,
    ...(resolution ? { resolution } : {}),
    ...(capabilities ? { capabilities } : {}),
  }
  const changed = stableStringify(raw) !== stableStringify(normalized)
  return { normalized, changed }
}
function addIssue(report: MigrationReport, issue: MigrationIssue) {
  // Cap the issue list so a badly corrupted DB cannot balloon the report.
  if (report.issues.length < MAX_ISSUES) {
    report.issues.push(issue)
  }
}
function normalizeModelFieldValue(
  rawValue: string | null,
  field: ModelField,
  mappingByModelId: Map<string, string[]>,
): { nextValue: string | null; changed: boolean; issue?: Omit<MigrationIssue, 'table' | 'rowId'> } {
  // Blank/NULL values normalize to NULL ('' counts as a change, NULL does not).
  if (!rawValue || !rawValue.trim()) return { nextValue: null, changed: rawValue !== null }
  const trimmed = rawValue.trim()
  // Already a provider::modelId key — just canonicalize the stored text.
  const parsed = parseModelKeyStrict(trimmed)
  if (parsed) {
    return { nextValue: parsed.modelKey, changed: parsed.modelKey !== rawValue }
  }
  const candidates = mappingByModelId.get(trimmed) || []
  // Exactly one custom model carries this bare modelId: adopt its key.
  if (candidates.length === 1) {
    return { nextValue: candidates[0], changed: candidates[0] !== rawValue }
  }
  if (candidates.length === 0) {
    // No custom-model match: fall back to the static legacy-id table.
    const mappedModelKey = LEGACY_MODEL_ID_MAP.get(trimmed)
    if (mappedModelKey) {
      return { nextValue: mappedModelKey, changed: mappedModelKey !== rawValue }
    }
    return {
      nextValue: rawValue,
      changed: false,
      issue: {
        field,
        kind: 'LEGACY_MODEL_ID_NOT_FOUND',
        rawValue,
        message: `${field} legacy modelId cannot be mapped`,
      },
    }
  }
  // Two or more providers expose the same bare modelId: refuse to guess.
  return {
    nextValue: rawValue,
    changed: false,
    issue: {
      field,
      kind: 'LEGACY_MODEL_ID_AMBIGUOUS',
      rawValue,
      candidates,
      message: `${field} legacy modelId maps to multiple providers`,
    },
  }
}
/**
 * Migrates legacy model identifiers to the provider-scoped model_key contract.
 *
 * Phase 1 scans every userPreference row: normalizes the customModels JSON
 * and the six default model fields, and builds a per-user
 * modelId -> modelKey[] mapping. Phase 2 reuses that mapping to normalize
 * the same model fields on each novelPromotionProject row.
 *
 * Writes are only performed when APPLY is set; a JSON report with counters
 * and per-row issues is always written to the --report path.
 */
async function main() {
  const reportPath = parseReportPathArg()
  // Aggregate counters and issue list for the final JSON report.
  const report: MigrationReport = {
    generatedAt: new Date().toISOString(),
    mode: APPLY ? 'apply' : 'dry-run',
    userPreference: {
      scanned: 0,
      updated: 0,
      updatedCustomModels: 0,
      updatedDefaultFields: 0,
    },
    novelPromotionProject: {
      scanned: 0,
      updated: 0,
      updatedFields: 0,
    },
    issues: [],
  }
  // Phase 1: user preferences (customModels JSON + default model fields).
  const userPrefs = await prisma.userPreference.findMany({
    select: {
      id: true,
      userId: true,
      customModels: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
    },
  })
  // Per-user mapping of legacy modelId -> candidate modelKeys, reused in phase 2.
  const userMappings = new Map<string, Map<string, string[]>>()
  for (const pref of userPrefs) {
    report.userPreference.scanned += 1
    const updateData: Partial<Record<ModelField | 'customModels', string | null>> = {}
    const parsedCustomModels = parseCustomModels(pref.customModels)
    const normalizedModels: NormalizedModel[] = []
    let customModelsChanged = false
    if (!parsedCustomModels.ok) {
      // Unparseable JSON: report it and continue with an empty model list,
      // so default-field resolution below has no user-level candidates.
      addIssue(report, {
        table: 'userPreference',
        rowId: pref.id,
        userId: pref.userId,
        field: 'customModels',
        kind: 'CUSTOM_MODELS_JSON_INVALID',
        rawValue: pref.customModels,
        message: 'customModels JSON is invalid',
      })
    } else {
      // Normalize each custom model; entries with issues are reported and dropped.
      for (let index = 0; index < parsedCustomModels.value.length; index += 1) {
        const normalizedResult = normalizeModel(parsedCustomModels.value[index])
        if (normalizedResult.issue) {
          addIssue(report, {
            table: 'userPreference',
            rowId: pref.id,
            userId: pref.userId,
            ...normalizedResult.issue,
          })
          continue
        }
        if (normalizedResult.normalized) {
          normalizedModels.push(normalizedResult.normalized)
          if (normalizedResult.changed) customModelsChanged = true
        }
      }
    }
    // Build the legacy-id lookup from the models that survived normalization.
    const mappingByModelId = new Map<string, string[]>()
    for (const model of normalizedModels) {
      const existing = mappingByModelId.get(model.modelId) || []
      if (!existing.includes(model.modelKey)) existing.push(model.modelKey)
      mappingByModelId.set(model.modelId, existing)
    }
    userMappings.set(pref.userId, mappingByModelId)
    if (customModelsChanged) {
      updateData.customModels = JSON.stringify(normalizedModels)
      report.userPreference.updatedCustomModels += 1
    }
    // Normalize each default model field using the per-user mapping.
    for (const field of MODEL_FIELDS) {
      const normalizedField = normalizeModelFieldValue(pref[field], field, mappingByModelId)
      if (normalizedField.issue) {
        addIssue(report, {
          table: 'userPreference',
          rowId: pref.id,
          userId: pref.userId,
          ...normalizedField.issue,
        })
      }
      if (normalizedField.changed) {
        updateData[field] = normalizedField.nextValue
        report.userPreference.updatedDefaultFields += 1
      }
    }
    // Only touch the row (and only in apply mode) when something changed.
    if (Object.keys(updateData).length > 0) {
      report.userPreference.updated += 1
      if (APPLY) {
        await prisma.userPreference.update({
          where: { id: pref.id },
          data: updateData,
        })
      }
    }
  }
  // Phase 2: novelPromotionProject rows, resolved via the owner's mapping.
  const projects = await prisma.novelPromotionProject.findMany({
    select: {
      id: true,
      projectId: true,
      analysisModel: true,
      characterModel: true,
      locationModel: true,
      storyboardModel: true,
      editModel: true,
      videoModel: true,
      project: {
        select: {
          userId: true,
        },
      },
    },
  })
  for (const row of projects as ProjectRow[]) {
    report.novelPromotionProject.scanned += 1
    // Users without preferences get an empty mapping (static legacy map still applies).
    const mappingByModelId = userMappings.get(row.project.userId) || new Map<string, string[]>()
    const updateData: Partial<Record<ModelField, string | null>> = {}
    for (const field of MODEL_FIELDS) {
      const normalizedField = normalizeModelFieldValue(row[field], field, mappingByModelId)
      if (normalizedField.issue) {
        addIssue(report, {
          table: 'novelPromotionProject',
          rowId: row.id,
          userId: row.project.userId,
          ...normalizedField.issue,
        })
      }
      if (normalizedField.changed) {
        updateData[field] = normalizedField.nextValue
        report.novelPromotionProject.updatedFields += 1
      }
    }
    if (Object.keys(updateData).length > 0) {
      report.novelPromotionProject.updated += 1
      if (APPLY) {
        await prisma.novelPromotionProject.update({
          where: { id: row.id },
          data: updateData,
        })
      }
    }
  }
  // Persist the report (creating parent directories) and print a one-line summary.
  const absoluteReportPath = path.isAbsolute(reportPath)
    ? reportPath
    : path.join(process.cwd(), reportPath)
  fs.mkdirSync(path.dirname(absoluteReportPath), { recursive: true })
  fs.writeFileSync(absoluteReportPath, `${JSON.stringify(report, null, 2)}\n`, 'utf8')
  process.stdout.write(
    `[migrate-model-config-contract] mode=${report.mode} ` +
    `prefs=${report.userPreference.scanned}/${report.userPreference.updated} ` +
    `projects=${report.novelPromotionProject.scanned}/${report.novelPromotionProject.updated} ` +
    `issues=${report.issues.length} report=${absoluteReportPath}\n`,
  )
}
// Script entry point: run the migration, report failures on stderr with a
// non-zero exit code, and always release the Prisma connection pool.
void main()
  .catch((err: unknown) => {
    process.exitCode = 1
    process.stderr.write(`[migrate-model-config-contract] failed: ${String(err)}\n`)
  })
  .finally(() => prisma.$disconnect())

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,16 @@
{
"generatedAt": "2026-02-12T12:53:18.381Z",
"mode": "apply",
"userPreference": {
"scanned": 7,
"updated": 4,
"updatedCustomModels": 0,
"updatedDefaultFields": 24
},
"novelPromotionProject": {
"scanned": 70,
"updated": 40,
"updatedFields": 106
},
"issues": []
}

View File

@@ -0,0 +1,16 @@
{
"generatedAt": "2026-02-12T12:53:12.288Z",
"mode": "dry-run",
"userPreference": {
"scanned": 7,
"updated": 4,
"updatedCustomModels": 0,
"updatedDefaultFields": 24
},
"novelPromotionProject": {
"scanned": 70,
"updated": 40,
"updatedFields": 106
},
"issues": []
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,60 @@
#!/bin/bash
# ============================================================
# Open-source release script (orphan-branch snapshot, no git history)
# Usage: bash scripts/publish-opensource.sh
# ============================================================
# Fail on errors, unset variables, and failures anywhere in a pipeline.
set -euo pipefail

# If anything fails after the orphan branch is created, switch back to main
# and delete the temporary branch so the repo is never left stranded on it.
cleanup() {
  local status=$?
  if [ "$status" -ne 0 ]; then
    git checkout main >/dev/null 2>&1 || true
    git branch -D release-public >/dev/null 2>&1 || true
  fi
}
trap cleanup EXIT

echo ""
echo "🚀 开始发布开源版本..."

# Require the main branch and a clean working tree before publishing.
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
if [ "$CURRENT_BRANCH" != "main" ]; then
  echo "❌ 请先切换到 main 分支再运行发布脚本"
  exit 1
fi
if ! git diff --quiet || ! git diff --cached --quiet; then
  echo "❌ 工作区有未提交的改动,请先 commit 再发布"
  exit 1
fi

# 1. Create an orphan branch with no history.
echo "📦 创建干净的孤儿分支..."
git checkout --orphan release-public

# 2. Stage everything (.gitignore already excludes logs, data, etc.).
git add -A

# 3. Remove content that must not be published from the staged snapshot.
echo "🧹 清理私有内容..."
git rm --cached .env -f 2>/dev/null || true               # local env (real credentials)
git rm -r --cached .github/workflows/ 2>/dev/null || true # CI pipelines (internal only)
git rm -r --cached .agent/ 2>/dev/null || true            # AI tooling directory
git rm -r --cached .artifacts/ 2>/dev/null || true        # AI tooling data
git rm -r --cached .shared/ 2>/dev/null || true           # AI tooling data

# 4. Commit the snapshot.
TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
git commit -m "release: opensource snapshot $TIMESTAMP"
echo "✅ 快照 commit 已创建"

# 5. Force-push to the public repository's main branch.
echo "⬆️ 推送到公开仓库..."
git push public release-public:main --force

echo ""
echo "=============================================="
echo "✅ 开源版本发布成功!"
echo "🔗 https://github.com/saturndec/waoowaoo"
echo "=============================================="
echo ""

# 6. Return to main and delete the temporary orphan branch.
git checkout main
git branch -D release-public
echo "🔙 已切回 main 分支,孤儿分支已清理"
echo ""

View File

@@ -0,0 +1,53 @@
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
// Parses the --minutes=N CLI flag; returns 5 when absent, non-numeric,
// or not a positive integer.
function parseMinutesArg() {
  const flag = '--minutes='
  const match = process.argv.find((arg) => arg.startsWith(flag))
  if (!match) return 5
  const parsed = Number.parseInt(match.slice(flag.length), 10)
  return Number.isFinite(parsed) && parsed > 0 ? parsed : 5
}
/**
 * Prints a breakdown of failed-task error codes over the last N minutes
 * (--minutes=N, default 5): a total line, then one tab-separated line per
 * error code with its count and percentage of the total.
 */
async function main() {
  const minutes = parseMinutesArg()
  const since = new Date(Date.now() - minutes * 60_000)
  // Group tasks that failed and finished within the window by errorCode,
  // most frequent codes first.
  const rows = await prisma.task.groupBy({
    by: ['errorCode'],
    where: {
      status: 'failed',
      finishedAt: { gte: since },
    },
    _count: {
      _all: true,
    },
    orderBy: {
      _count: {
        errorCode: 'desc',
      },
    },
  })
  const total = rows.reduce((sum: number, row) => sum + (row._count?._all || 0), 0)
  _ulogInfo(`[TaskErrorStats] window=${minutes}m failed_total=${total}`)
  if (!rows.length) {
    _ulogInfo('No failed tasks in the selected window.')
    return
  }
  for (const row of rows) {
    // A null errorCode group is reported under the UNKNOWN label.
    const code = row.errorCode || 'UNKNOWN'
    const count = row?._count?._all || 0
    const ratio = total > 0 ? ((count / total) * 100).toFixed(1) : '0.0'
    _ulogInfo(`${code}\t${count}\t${ratio}%`)
  }
}
// Script entry point: log failures via the scoped logger with exit code 1,
// and always close the Prisma connection afterwards.
void main()
  .catch((error) => {
    const detail = error?.message || error
    _ulogError('[TaskErrorStats] failed:', detail)
    process.exit(1)
  })
  .finally(() => prisma.$disconnect())

View File

@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# Run a regression command, tee its output to a temporary log, and on
# failure annotate each failing test file with its most recent commit and
# the commit that first added it.
set -euo pipefail

if [ "$#" -eq 0 ]; then
  echo "[regression-runner] missing command"
  exit 2
fi

LOG_FILE="$(mktemp -t regression-runner.XXXXXX.log)"
# Remove the temp log on every exit path — the original only deleted it at
# the end, leaking the file when the script was interrupted mid-run.
trap 'rm -f "$LOG_FILE"' EXIT

# Run the command without aborting on failure so its status can be inspected.
set +e
"$@" 2>&1 | tee "$LOG_FILE"
CMD_STATUS=${PIPESTATUS[0]}
set -e

if [ "$CMD_STATUS" -ne 0 ]; then
  echo
  echo "[regression-runner] regression failed, collecting diagnostics..."
  # Extract unique file paths from jest/vitest-style " FAIL <path>" lines.
  FAILED_FILES="$(grep -E '^ FAIL ' "$LOG_FILE" | sed -E 's/^ FAIL ([^ ]+).*/\1/' | sort -u || true)"
  if [ -z "$FAILED_FILES" ]; then
    echo "[regression-runner] no explicit FAIL file lines found in output"
  else
    echo "[regression-runner] failed files:"
    while IFS= read -r file; do
      [ -z "$file" ] && continue
      echo " - $file"
      # Latest commit touching the file, and the commit that added it.
      LAST_COMMIT="$(git log -n 1 --format='%h %ad %an %s' --date=short -- "$file" || true)"
      FIRST_COMMIT="$(git log --diff-filter=A --follow --format='%h %ad %an %s' --date=short -- "$file" | tail -n 1 || true)"
      if [ -n "$LAST_COMMIT" ]; then
        echo " latest: $LAST_COMMIT"
      fi
      if [ -n "$FIRST_COMMIT" ]; then
        echo " first: $FIRST_COMMIT"
      fi
    done <<< "$FAILED_FILES"
  fi
fi

exit "$CMD_STATUS"

View File

@@ -0,0 +1,41 @@
// One-off maintenance script: blank out references to a deleted custom model
// (identified by `needle`) in a specific user's novelPromotionProject rows.
import { PrismaClient } from '@prisma/client';
const p = new PrismaClient();
// Hard kill switch so a hung DB connection cannot block the script forever.
setTimeout(() => { console.error('TIMEOUT'); process.exit(1); }, 30000);
// NOTE(review): hard-coded user id and model key — this is an incident-specific
// cleanup, not a reusable tool.
const userId = '3d84c341-87d7-4165-971d-a3f6c576aa21';
const needle = 'gemini-compatible:5b127c32-136e-4e5a-af74-8bae3e28be7a';
const modelFields = ['characterModel', 'locationModel', 'storyboardModel', 'editModel'];
// novelPromotionData is a relation, query directly
const npProjects = await p.novelPromotionProject.findMany({
  where: { project: { userId } },
  select: { id: true, projectId: true, characterModel: true, locationModel: true, storyboardModel: true, editModel: true, project: { select: { name: true } } }
});
let totalCleaned = 0;
for (const np of npProjects) {
  const updates = {};
  const cleanedFields = [];
  // Collect every model field still pointing at the deleted model.
  for (const field of modelFields) {
    if (typeof np[field] === 'string' && np[field].includes(needle)) {
      // Empty string clears the field (project falls back to defaults).
      updates[field] = '';
      cleanedFields.push(`${field}: ${np[field]}`);
    }
  }
  if (cleanedFields.length > 0) {
    await p.novelPromotionProject.update({
      where: { id: np.id },
      data: updates
    });
    console.log(`${np.project.name} (${np.projectId}): cleared ${cleanedFields.length} fields`);
    cleanedFields.forEach(f => console.log(` - ${f}`));
    totalCleaned++;
  }
}
console.log(`\nDone. Cleaned ${totalCleaned} projects.`);
await p.$disconnect();
process.exit(0);

View File

@@ -0,0 +1,43 @@
// One-off audit script: search a specific user's data for lingering references
// to a removed custom model (matched by the `needle` prefix) in both the
// userPreference default fields and each project's novelPromotionData JSON.
import { PrismaClient } from '@prisma/client';
const p = new PrismaClient();
// Hard kill switch so a hung DB connection cannot block the script forever.
setTimeout(() => { console.error('TIMEOUT'); process.exit(1); }, 15000);
// NOTE(review): hard-coded user id and needle — incident-specific audit.
const userId = '3d84c341-87d7-4165-971d-a3f6c576aa21';
const needle = 'gemini-compatible:5b';
// 1. Check userPreference default models
const pref = await p.userPreference.findUnique({
  where: { userId },
  select: { analysisModel: true, characterModel: true, locationModel: true, storyboardModel: true, editModel: true, videoModel: true }
});
console.log('=== UserPreference defaults ===');
let found = false;
for (const [k, v] of Object.entries(pref || {})) {
  if (typeof v === 'string' && v.includes(needle)) {
    console.log('  FOUND in', k, ':', v);
    found = true;
  }
}
if (!found) console.log('  (clean)');
// 2. Check novelPromotionData JSON for any reference
const projects = await p.project.findMany({
  where: { userId },
  select: { id: true, name: true, novelPromotionData: true }
});
console.log('\n=== Project novelPromotionData ===');
for (const proj of projects) {
  // Cheap whole-document check first; only walk the keys on a hit.
  const data = JSON.stringify(proj.novelPromotionData || {});
  if (data.includes(needle)) {
    // Find which keys reference it
    const parsed = proj.novelPromotionData;
    for (const [k, v] of Object.entries(parsed || {})) {
      if (typeof v === 'string' && v.includes(needle)) {
        console.log('  FOUND in project', proj.id, '(' + proj.name + ') field:', k, '=', v);
      }
    }
  }
}
await p.$disconnect();
process.exit(0);

216
scripts/watchdog.ts Normal file
View File

@@ -0,0 +1,216 @@
import { createScopedLogger } from '@/lib/logging/core'
import { prisma } from '@/lib/prisma'
import { addTaskJob } from '@/lib/task/queues'
import { resolveTaskLocaleFromBody } from '@/lib/task/resolve-locale'
import { markTaskFailed } from '@/lib/task/service'
import { publishTaskEvent } from '@/lib/task/publisher'
import { TASK_EVENT_TYPE, TASK_TYPE, type TaskType } from '@/lib/task/types'
// Watchdog poll interval in ms; env-overridable. The trailing `|| 30000`
// also catches a parsed value of 0 or NaN.
const INTERVAL_MS = Number.parseInt(process.env.WATCHDOG_INTERVAL_MS || '30000', 10) || 30000
// Processing tasks whose heartbeat is older than this are treated as stalled.
const HEARTBEAT_TIMEOUT_MS = Number.parseInt(process.env.TASK_HEARTBEAT_TIMEOUT_MS || '90000', 10) || 90000
// Fast membership check over the known TaskType values.
const TASK_TYPE_SET: ReadonlySet<string> = new Set(Object.values(TASK_TYPE))
const logger = createScopedLogger({
  module: 'watchdog',
  action: 'watchdog.tick',
})
/**
 * Narrows an arbitrary task-type string to the TaskType union,
 * returning null for values not present in TASK_TYPE_SET.
 */
function toTaskType(value: string): TaskType | null {
  return TASK_TYPE_SET.has(value) ? (value as TaskType) : null
}
/**
 * Returns the value as a plain object payload, or null when it is not a
 * non-array object (null, primitives, and arrays are all rejected).
 */
function toTaskPayload(value: unknown): Record<string, unknown> | null {
  const isPlainObject =
    typeof value === 'object' && value !== null && !Array.isArray(value)
  return isPlainObject ? (value as Record<string, unknown>) : null
}
/**
 * Re-enqueues tasks stuck in 'queued' with no enqueuedAt timestamp — i.e.
 * rows whose original enqueue apparently never reached the job queue.
 * Processes at most 100 oldest rows per tick; individual failures are
 * recorded on the task row (enqueueAttempts / lastEnqueueError) so the
 * next tick can retry.
 */
async function recoverQueuedTasks() {
  const rows = await prisma.task.findMany({
    where: {
      status: 'queued',
      enqueuedAt: null,
    },
    take: 100,
    orderBy: { createdAt: 'asc' },
  })
  for (const task of rows) {
    // Unknown task types cannot be routed to a handler; log and skip.
    const taskType = toTaskType(task.type)
    if (!taskType) {
      logger.error({
        action: 'watchdog.reenqueue_invalid_type',
        message: `invalid task type: ${task.type}`,
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        errorCode: 'INVALID_PARAMS',
        retryable: false,
      })
      continue
    }
    try {
      // A locale is mandatory for task execution; without one the task is
      // failed permanently rather than re-enqueued.
      const locale = resolveTaskLocaleFromBody(task.payload)
      if (!locale) {
        await markTaskFailed(task.id, 'TASK_LOCALE_REQUIRED', 'task locale is missing')
        logger.error({
          action: 'watchdog.reenqueue_locale_missing',
          message: 'task locale is missing',
          taskId: task.id,
          projectId: task.projectId,
          userId: task.userId,
          errorCode: 'TASK_LOCALE_REQUIRED',
          retryable: false,
        })
        continue
      }
      await addTaskJob({
        taskId: task.id,
        type: taskType,
        locale,
        projectId: task.projectId,
        episodeId: task.episodeId,
        targetType: task.targetType,
        targetId: task.targetId,
        payload: toTaskPayload(task.payload),
        userId: task.userId,
      })
      // Mark the task as enqueued only after addTaskJob succeeded, and clear
      // any error left by a previous failed attempt.
      await prisma.task.update({
        where: { id: task.id },
        data: {
          enqueuedAt: new Date(),
          enqueueAttempts: { increment: 1 },
          lastEnqueueError: null,
        },
      })
      logger.info({
        action: 'watchdog.reenqueue',
        message: 'watchdog re-enqueued queued task',
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        details: {
          type: task.type,
          targetType: task.targetType,
          targetId: task.targetId,
        },
      })
    } catch (error: unknown) {
      // Record the failure on the row; enqueuedAt stays null so the task is
      // picked up again on a later tick.
      const message = error instanceof Error ? error.message : 're-enqueue failed'
      await prisma.task.update({
        where: { id: task.id },
        data: {
          enqueueAttempts: { increment: 1 },
          lastEnqueueError: message,
        },
      })
      logger.error({
        action: 'watchdog.reenqueue_failed',
        message,
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        errorCode: 'EXTERNAL_ERROR',
        retryable: true,
      })
    }
  }
}
/**
 * Handles 'processing' tasks whose heartbeat has expired. Tasks that have
 * exhausted their attempts are failed permanently (with a FAILED event);
 * the rest are reset to 'queued' with enqueuedAt cleared so the next
 * recoverQueuedTasks() pass re-enqueues them. Processes at most 100 rows
 * per tick.
 */
async function cleanupZombieProcessingTasks() {
  const timeoutAt = new Date(Date.now() - HEARTBEAT_TIMEOUT_MS)
  const rows = await prisma.task.findMany({
    where: {
      status: 'processing',
      heartbeatAt: { lt: timeoutAt },
    },
    take: 100,
  })
  for (const task of rows) {
    // Out of attempts: mark failed and notify subscribers.
    if ((task.attempt || 0) >= (task.maxAttempts || 5)) {
      await markTaskFailed(task.id, 'WATCHDOG_TIMEOUT', 'Task heartbeat timeout')
      await publishTaskEvent({
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        type: TASK_EVENT_TYPE.FAILED,
        payload: { reason: 'watchdog_timeout' },
      })
      logger.error({
        action: 'watchdog.fail_timeout',
        message: 'watchdog marked task as failed due to heartbeat timeout',
        taskId: task.id,
        projectId: task.projectId,
        userId: task.userId,
        errorCode: 'WATCHDOG_TIMEOUT',
        retryable: true,
      })
      continue
    }
    // Attempts remain: reset to a fresh queued state (enqueuedAt: null is
    // the marker recoverQueuedTasks() looks for).
    await prisma.task.update({
      where: { id: task.id },
      data: {
        status: 'queued',
        enqueuedAt: null,
        heartbeatAt: null,
        startedAt: null,
      },
    })
    await publishTaskEvent({
      taskId: task.id,
      projectId: task.projectId,
      userId: task.userId,
      type: TASK_EVENT_TYPE.CREATED,
      payload: { reason: 'watchdog_requeue' },
    })
    logger.warn({
      action: 'watchdog.requeue_processing',
      message: 'watchdog re-queued stalled processing task',
      taskId: task.id,
      projectId: task.projectId,
      userId: task.userId,
      retryable: true,
    })
  }
}
/**
 * One watchdog pass: re-enqueue stuck queued tasks, then reap zombie
 * processing tasks. Errors are logged and swallowed so the interval
 * timer keeps running.
 */
async function tick() {
  const tickStart = Date.now()
  try {
    await recoverQueuedTasks()
    await cleanupZombieProcessingTasks()
    logger.info({
      action: 'watchdog.tick.ok',
      message: 'watchdog tick completed',
      durationMs: Date.now() - tickStart,
    })
  } catch (error: unknown) {
    logger.error({
      action: 'watchdog.tick.failed',
      message: error instanceof Error ? error.message : 'watchdog tick failed',
      durationMs: Date.now() - tickStart,
      errorCode: 'INTERNAL_ERROR',
      retryable: true,
    })
  }
}
// Announce the effective configuration, run one immediate pass, then keep
// polling forever on the configured interval.
logger.info({
  action: 'watchdog.started',
  message: 'watchdog started',
  details: {
    intervalMs: INTERVAL_MS,
    heartbeatTimeoutMs: HEARTBEAT_TIMEOUT_MS,
  },
})
void tick()
setInterval(() => {
  void tick()
}, INTERVAL_MS)