feat: implement robustness guards
This commit is contained in:
116
scripts/guards/api-route-contract-guard.mjs
Normal file
116
scripts/guards/api-route-contract-guard.mjs
Normal file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import process from 'process'
|
||||
import { pathToFileURL } from 'url'
|
||||
|
||||
// Repository root and the Next.js API route directory this guard scans.
const root = process.cwd()
const apiDir = path.join(root, 'src', 'app', 'api')

// Routes permitted to skip the apiHandler() wrapper (checked in inspectRouteContract).
export const API_HANDLER_ALLOWLIST = new Set([
  'src/app/api/auth/[...nextauth]/route.ts',
  'src/app/api/files/[...path]/route.ts',
  'src/app/api/system/boot-id/route.ts',
])

// Routes exempt from the auth-call requirement (treated as intentionally public).
export const PUBLIC_ROUTE_ALLOWLIST = new Set([
  'src/app/api/auth/[...nextauth]/route.ts',
  'src/app/api/auth/register/route.ts',
  'src/app/api/cos/image/route.ts',
  'src/app/api/files/[...path]/route.ts',
  'src/app/api/storage/sign/route.ts',
  'src/app/api/system/boot-id/route.ts',
])

// Any one of these call patterns counts as "this route performs an auth check".
const AUTH_CALL_PATTERNS = [
  /\brequireUserAuth\s*\(/,
  /\brequireProjectAuth\s*\(/,
  /\brequireProjectAuthLight\s*\(/,
]
|
||||
|
||||
// Write a failure banner plus detail bullets to stderr, then exit non-zero.
function fail(title, details = []) {
  const report = [`\n[api-route-contract-guard] ${title}\n`, ...details.map((detail) => ` - ${detail}\n`)]
  process.stderr.write(report.join(''))
  process.exit(1)
}
|
||||
|
||||
/**
 * Recursively collect the absolute path of every `route.ts` under `dir`.
 * Skips .git/.next/node_modules. Appends into and returns `out`.
 */
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const skipped = ['.git', '.next', 'node_modules']
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (skipped.includes(entry.name)) continue
    const child = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(child, out)
    } else if (entry.name === 'route.ts') {
      out.push(child)
    }
  }
  return out
}
|
||||
|
||||
// Convert an absolute path into a repo-relative, forward-slash path.
function toRel(fullPath) {
  const segments = path.relative(root, fullPath).split(path.sep)
  return segments.join('/')
}
|
||||
|
||||
// True when the module text invokes the shared `apiHandler(` wrapper.
function hasApiHandlerWrapper(content) {
  const wrapperCall = /\bapiHandler\s*\(/
  return wrapperCall.test(content)
}
|
||||
|
||||
// True when any recognized auth-helper invocation appears in the module text.
function hasRequiredAuth(content) {
  for (const pattern of AUTH_CALL_PATTERNS) {
    if (pattern.test(content)) return true
  }
  return false
}
|
||||
|
||||
/**
 * Check one route module against the contract:
 *  - must be wrapped in apiHandler() unless allowlisted,
 *  - must call an auth helper unless explicitly public.
 * Returns human-readable violation strings (empty array = compliant).
 */
export function inspectRouteContract(relPath, content) {
  const violations = []

  const needsWrapper = !API_HANDLER_ALLOWLIST.has(relPath)
  if (needsWrapper && !hasApiHandlerWrapper(content)) {
    violations.push(`${relPath} missing apiHandler wrapper`)
  }

  const needsAuth = !PUBLIC_ROUTE_ALLOWLIST.has(relPath)
  if (needsAuth && !hasRequiredAuth(content)) {
    violations.push(`${relPath} missing requireUserAuth/requireProjectAuth/requireProjectAuthLight`)
  }

  return violations
}
|
||||
|
||||
/**
 * Scan src/app/api beneath `scanRoot` and return every contract violation.
 * @param {string} scanRoot - directory treated as the repository root.
 * @returns {string[]} flat list of violation messages.
 */
export function findApiRouteContractViolations(scanRoot = root) {
  const routesRoot = path.join(scanRoot, 'src', 'app', 'api')
  const violations = []
  for (const fullPath of walk(routesRoot)) {
    const relPath = path.relative(scanRoot, fullPath).split(path.sep).join('/')
    const content = fs.readFileSync(fullPath, 'utf8')
    violations.push(...inspectRouteContract(relPath, content))
  }
  return violations
}
|
||||
|
||||
/**
 * CLI entry point: verify every route module under src/app/api.
 * Prints a one-line summary on success; exits non-zero via fail() when the
 * directory is missing or any contract violation is found.
 */
export function main() {
  if (!fs.existsSync(apiDir)) {
    fail('Missing src/app/api directory')
  }

  // Walk the tree once and reuse the list; previously walk(apiDir) ran a
  // second time solely to count routes for the success message.
  const routeFiles = walk(apiDir)
  const violations = routeFiles
    .map((fullPath) => {
      const relPath = toRel(fullPath)
      const content = fs.readFileSync(fullPath, 'utf8')
      return inspectRouteContract(relPath, content)
    })
    .flat()

  if (violations.length > 0) {
    fail('Found API route contract violations', violations)
  }

  process.stdout.write(
    `[api-route-contract-guard] OK routes=${routeFiles.length} public=${PUBLIC_ROUTE_ALLOWLIST.size} apiHandlerExceptions=${API_HANDLER_ALLOWLIST.size}\n`,
  )
}
|
||||
|
||||
// Run main() only when this file is executed directly (node scripts/guards/...),
// not when imported by tests: compare the module URL to the invoked script path.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main()
}
|
||||
101
scripts/guards/image-reference-normalization-guard.mjs
Normal file
101
scripts/guards/image-reference-normalization-guard.mjs
Normal file
@@ -0,0 +1,101 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import process from 'process'
|
||||
import { pathToFileURL } from 'url'
|
||||
|
||||
// Repository root and the worker-handler directory this guard scans.
const root = process.cwd()
const handlersDir = path.join(root, 'src', 'lib', 'workers', 'handlers')

// Modules exempt from the check (the shared helper implementation itself).
export const NORMALIZATION_HELPER_ALLOWLIST = new Set([
  'src/lib/workers/handlers/image-task-handler-shared.ts',
])

// Any one of these call patterns counts as "references a normalization helper".
const ACCEPTED_NORMALIZATION_MARKERS = [
  /\bnormalizeReferenceImagesForGeneration\s*\(/,
  /\bnormalizeToBase64ForGeneration\s*\(/,
  /\bgenerateLabeledImageToCos\s*\(/,
]
|
||||
|
||||
// Emit a failure banner plus detail bullets on stderr, then exit(1).
function fail(title, details = []) {
  process.stderr.write(`\n[image-reference-normalization-guard] ${title}\n`)
  details.forEach((detail) => process.stderr.write(` - ${detail}\n`))
  process.exit(1)
}
|
||||
|
||||
/**
 * Depth-first scan collecting every `.ts` file below `dir` into `out`.
 * Skips .git/.next/node_modules; returns the accumulator.
 */
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  fs.readdirSync(dir, { withFileTypes: true }).forEach((entry) => {
    if (entry.name === '.git' || entry.name === '.next' || entry.name === 'node_modules') return
    const resolved = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      walk(resolved, out)
    } else if (entry.name.endsWith('.ts')) {
      out.push(resolved)
    }
  })
  return out
}
|
||||
|
||||
// Repo-relative, forward-slash form of an absolute path.
function toRel(fullPath) {
  const rel = path.relative(root, fullPath)
  return rel.split(path.sep).join('/')
}
|
||||
|
||||
// True when the module both resolves image sources from a generation record
// and passes a `referenceImages:` property somewhere in the same file.
function usesGenerationReferenceImages(content) {
  const resolvesFromGeneration = /\bresolveImageSourceFromGeneration\s*\(/.test(content)
  const passesReferenceImages = /\breferenceImages\s*:/.test(content)
  return resolvesFromGeneration && passesReferenceImages
}
|
||||
|
||||
// True when any accepted normalization-helper invocation appears in the text.
function hasNormalizationMarker(content) {
  for (const pattern of ACCEPTED_NORMALIZATION_MARKERS) {
    if (pattern.test(content)) return true
  }
  return false
}
|
||||
|
||||
/**
 * Check one handler module: if it resolves generation reference images, it
 * must route them through an accepted normalization helper.
 * Returns an array of violation messages (empty = compliant or exempt).
 */
export function inspectImageReferenceNormalization(relPath, content) {
  const exempt =
    NORMALIZATION_HELPER_ALLOWLIST.has(relPath) ||
    !usesGenerationReferenceImages(content) ||
    hasNormalizationMarker(content)
  if (exempt) return []
  return [
    `${relPath} uses resolveImageSourceFromGeneration with referenceImages but does not reference normalizeReferenceImagesForGeneration/normalizeToBase64ForGeneration/generateLabeledImageToCos`,
  ]
}
|
||||
|
||||
/**
 * Scan src/lib/workers/handlers beneath `scanRoot` and return all
 * image-reference normalization violations as a flat string list.
 */
export function findImageReferenceNormalizationViolations(scanRoot = root) {
  const scanDir = path.join(scanRoot, 'src', 'lib', 'workers', 'handlers')
  return walk(scanDir).flatMap((fullPath) => {
    const relPath = path.relative(scanRoot, fullPath).split(path.sep).join('/')
    return inspectImageReferenceNormalization(relPath, fs.readFileSync(fullPath, 'utf8'))
  })
}
|
||||
|
||||
/**
 * CLI entry point: verify every worker handler normalizes generation
 * reference images; prints a summary on success, exits non-zero on failure.
 */
export function main() {
  if (!fs.existsSync(handlersDir)) {
    fail('Missing src/lib/workers/handlers directory')
  }

  const handlerFiles = walk(handlersDir)
  const violations = []
  for (const fullPath of handlerFiles) {
    const content = fs.readFileSync(fullPath, 'utf8')
    violations.push(...inspectImageReferenceNormalization(toRel(fullPath), content))
  }

  if (violations.length > 0) {
    fail('Found image reference normalization violations', violations)
  }

  process.stdout.write(
    `[image-reference-normalization-guard] OK handlers=${handlerFiles.length} allowlist=${NORMALIZATION_HELPER_ALLOWLIST.size}\n`,
  )
}
|
||||
|
||||
// Run main() only when this file is executed directly (node scripts/guards/...),
// not when imported by tests: compare the module URL to the invoked script path.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main()
}
|
||||
84
scripts/guards/task-submit-compensation-guard.mjs
Normal file
84
scripts/guards/task-submit-compensation-guard.mjs
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import process from 'process'
|
||||
import { pathToFileURL } from 'url'
|
||||
|
||||
// Repository root and the API route directory this guard scans.
const root = process.cwd()
const apiDir = path.join(root, 'src', 'app', 'api')
// Matches ORM-style `.create(` calls, i.e. the route writes data.
const CREATE_PATTERN = /\.\s*create\s*\(/
// Matches `submitTask(` invocations, i.e. the route queues async work.
const SUBMIT_TASK_PATTERN = /\bsubmitTask\s*\(/
// Any mention of rollback/compensation counts as handling the failure path.
const ROLLBACK_PATTERN = /rollback|compensat/i
||||
|
||||
// Report a guard failure on stderr and abort the process with status 1.
function fail(title, details = []) {
  let report = `\n[task-submit-compensation-guard] ${title}\n`
  for (const detail of details) {
    report += ` - ${detail}\n`
  }
  process.stderr.write(report)
  process.exit(1)
}
|
||||
|
||||
/**
 * Recursively gather every `route.ts` file under `dir` into `out`.
 * Ignores .git, .next, and node_modules directories entirely.
 */
function walk(dir, out = []) {
  if (!fs.existsSync(dir)) return out
  const ignored = new Set(['.git', '.next', 'node_modules'])
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (ignored.has(entry.name)) continue
    const absolute = path.join(dir, entry.name)
    if (entry.isDirectory()) walk(absolute, out)
    else if (entry.name === 'route.ts') out.push(absolute)
  }
  return out
}
|
||||
|
||||
// Convert an absolute path to a repo-relative path using forward slashes.
function toRel(fullPath) {
  return path.relative(root, fullPath).replaceAll(path.sep, '/')
}
|
||||
|
||||
/**
 * Flag routes that create records and submit an async task without any
 * visible rollback/compensation handling in the same module.
 * Returns an array of violation messages (empty = compliant).
 */
export function inspectTaskSubmitCompensation(relPath, content) {
  const createsData = CREATE_PATTERN.test(content)
  const submitsTask = SUBMIT_TASK_PATTERN.test(content)
  const hasCompensation = ROLLBACK_PATTERN.test(content)
  if (!createsData || !submitsTask || hasCompensation) return []
  return [
    `${relPath} creates data before submitTask without explicit rollback/compensation marker`,
  ]
}
|
||||
|
||||
/**
 * Scan src/app/api beneath `scanRoot` and collect every create+submitTask
 * route lacking a compensation marker, as a flat string list.
 */
export function findTaskSubmitCompensationViolations(scanRoot = root) {
  const routesRoot = path.join(scanRoot, 'src', 'app', 'api')
  const results = []
  for (const fullPath of walk(routesRoot)) {
    const relPath = path.relative(scanRoot, fullPath).split(path.sep).join('/')
    const content = fs.readFileSync(fullPath, 'utf8')
    results.push(...inspectTaskSubmitCompensation(relPath, content))
  }
  return results
}
|
||||
|
||||
/**
 * CLI entry point: scan all API routes and fail when any create+submitTask
 * route lacks a rollback/compensation marker.
 */
export function main() {
  if (!fs.existsSync(apiDir)) {
    fail('Missing src/app/api directory')
  }

  const routeFiles = walk(apiDir)
  const violations = routeFiles.flatMap((fullPath) =>
    inspectTaskSubmitCompensation(toRel(fullPath), fs.readFileSync(fullPath, 'utf8')),
  )

  if (violations.length > 0) {
    fail('Found create+submitTask routes without compensation marker', violations)
  }

  process.stdout.write(`[task-submit-compensation-guard] OK routes=${routeFiles.length}\n`)
}
|
||||
|
||||
// Run main() only when this file is executed directly (node scripts/guards/...),
// not when imported by tests: compare the module URL to the invoked script path.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main()
}
|
||||
@@ -48,7 +48,7 @@ const LOG_LEVELS = { debug: 0, info: 1, warn: 2, error: 3 }
|
||||
function log(level: string, message: string, ...args: unknown[]) {
|
||||
if (LOG_LEVELS[level as keyof typeof LOG_LEVELS] >= LOG_LEVELS[CONFIG.options.logLevel as keyof typeof LOG_LEVELS]) {
|
||||
const timestamp = new Date().toISOString()
|
||||
console[level === 'error' ? 'error' : 'log'](\`[\${timestamp}] [\${level.toUpperCase()}] \${message}\`, ...args)
|
||||
console[level === 'error' ? 'error' : 'log'](`[${timestamp}] [${level.toUpperCase()}] ${message}`, ...args)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -87,7 +87,7 @@ async function scanLocalFiles(dir: string, basePath = ''): Promise<Array<{localP
|
||||
}
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
log('warn', \`无法读取目录: \${dir}\`, (err as Error).message)
|
||||
log('warn', `无法读取目录: ${dir}`, (err as Error).message)
|
||||
}
|
||||
|
||||
return files
|
||||
@@ -129,11 +129,11 @@ async function saveProgress(migratedKeys: Set<string>) {
|
||||
|
||||
// ==================== 存储桶检查/创建 ====================
|
||||
async function ensureBucket() {
|
||||
log('info', \`检查存储桶: \${CONFIG.minio.bucket}\`)
|
||||
log('info', `检查存储桶: ${CONFIG.minio.bucket}`)
|
||||
|
||||
const exists = await minioClient.bucketExists(CONFIG.minio.bucket)
|
||||
if (!exists) {
|
||||
log('info', \`创建存储桶: \${CONFIG.minio.bucket}\`)
|
||||
log('info', `创建存储桶: ${CONFIG.minio.bucket}`)
|
||||
await minioClient.makeBucket(CONFIG.minio.bucket, CONFIG.minio.region)
|
||||
|
||||
// 设置存储桶为 public read (可选,根据需求)
|
||||
@@ -144,7 +144,7 @@ async function ensureBucket() {
|
||||
Effect: 'Allow',
|
||||
Principal: { AWS: ['*'] },
|
||||
Action: ['s3:GetObject'],
|
||||
Resource: [\`arn:aws:s3:::\${CONFIG.minio.bucket}/*\`]
|
||||
Resource: [`arn:aws:s3:::${CONFIG.minio.bucket}/*`]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -159,12 +159,12 @@ async function uploadFile(fileInfo: {localPath: string, key: string, size: numbe
|
||||
|
||||
// 检查是否已迁移
|
||||
if (migratedKeys.has(key)) {
|
||||
log('debug', \`跳过已迁移: \${key}\`)
|
||||
log('debug', `跳过已迁移: ${key}`)
|
||||
return { status: 'skipped', key }
|
||||
}
|
||||
|
||||
if (CONFIG.options.dryRun) {
|
||||
log('info', \`[DRY RUN] 将上传: \${key} (\${formatBytes(size)})\`)
|
||||
log('info', `[DRY RUN] 将上传: ${key} (${formatBytes(size)})`)
|
||||
return { status: 'dry_run', key }
|
||||
}
|
||||
|
||||
@@ -185,11 +185,11 @@ async function uploadFile(fileInfo: {localPath: string, key: string, size: numbe
|
||||
// 记录迁移成功
|
||||
migratedKeys.add(key)
|
||||
|
||||
log('info', \`✓ 上传成功: \${key} (\${formatBytes(size)})\`)
|
||||
log('info', `✓ 上传成功: ${key} (${formatBytes(size)})`)
|
||||
return { status: 'success', key, size }
|
||||
|
||||
} catch (err: unknown) {
|
||||
log('error', \`✗ 上传失败: \${key}\`, (err as Error).message)
|
||||
log('error', `✗ 上传失败: ${key}`, (err as Error).message)
|
||||
return { status: 'error', key, error: (err as Error).message }
|
||||
}
|
||||
}
|
||||
@@ -251,17 +251,17 @@ async function main() {
|
||||
console.log()
|
||||
|
||||
log('info', '配置信息:')
|
||||
log('info', \` 本地目录: \${path.resolve(CONFIG.local.baseDir)}\`)
|
||||
log('info', \` MinIO: \${CONFIG.minio.endPoint}:\${CONFIG.minio.port}/\${CONFIG.minio.bucket}\`)
|
||||
log('info', \` 并发数: \${CONFIG.options.concurrency}\`)
|
||||
log('info', \` 干运行: \${CONFIG.options.dryRun}\`)
|
||||
log('info', \` 断点续传: \${CONFIG.options.resume}\`)
|
||||
log('info', ` 本地目录: ${path.resolve(CONFIG.local.baseDir)}`)
|
||||
log('info', ` MinIO: ${CONFIG.minio.endPoint}:${CONFIG.minio.port}/${CONFIG.minio.bucket}`)
|
||||
log('info', ` 并发数: ${CONFIG.options.concurrency}`)
|
||||
log('info', ` 干运行: ${CONFIG.options.dryRun}`)
|
||||
log('info', ` 断点续传: ${CONFIG.options.resume}`)
|
||||
console.log()
|
||||
|
||||
// 1. 扫描本地文件
|
||||
log('info', '扫描本地文件...')
|
||||
const files = await scanLocalFiles(CONFIG.local.baseDir)
|
||||
log('info', \`找到 \${files.length} 个文件\`)
|
||||
log('info', `找到 ${files.length} 个文件`)
|
||||
|
||||
if (files.length === 0) {
|
||||
log('info', '没有需要迁移的文件')
|
||||
@@ -269,12 +269,12 @@ async function main() {
|
||||
}
|
||||
|
||||
const totalSize = files.reduce((sum, f) => sum + f.size, 0)
|
||||
log('info', \`总大小: \${formatBytes(totalSize)}\`)
|
||||
log('info', `总大小: ${formatBytes(totalSize)}`)
|
||||
console.log()
|
||||
|
||||
// 2. 加载进度
|
||||
const migratedKeys = await loadProgress()
|
||||
log('info', \`已迁移: \${migratedKeys.size} 个文件\`)
|
||||
log('info', `已迁移: ${migratedKeys.size} 个文件`)
|
||||
|
||||
// 3. 确保存储桶存在
|
||||
await ensureBucket()
|
||||
@@ -298,7 +298,7 @@ async function main() {
|
||||
if (processed % 10 === 0) {
|
||||
await saveProgress(migratedKeys)
|
||||
const progress = ((processed / files.length) * 100).toFixed(1)
|
||||
log('info', \`进度: \${progress}% (\${processed}/\${files.length})\`)
|
||||
log('info', `进度: ${progress}% (${processed}/${files.length})`)
|
||||
}
|
||||
|
||||
return result
|
||||
@@ -315,11 +315,11 @@ async function main() {
|
||||
console.log('╔══════════════════════════════════════════════════════════╗')
|
||||
console.log('║ 迁移完成 ║')
|
||||
console.log('╠══════════════════════════════════════════════════════════╣')
|
||||
console.log(\`║ 总文件数: \${String(files.length).padEnd(39)} ║\`)
|
||||
console.log(\`║ 成功: \${String(success).padEnd(39)} ║\`)
|
||||
console.log(\`║ 失败: \${String(failed).padEnd(39)} ║\`)
|
||||
console.log(\`║ 跳过: \${String(skipped).padEnd(39)} ║\`)
|
||||
console.log(\`║ 耗时: \${String(duration + 's').padEnd(39)} ║\`)
|
||||
console.log(`║ 总文件数: ${String(files.length).padEnd(39)} ║`)
|
||||
console.log(`║ 成功: ${String(success).padEnd(39)} ║`)
|
||||
console.log(`║ 失败: ${String(failed).padEnd(39)} ║`)
|
||||
console.log(`║ 跳过: ${String(skipped).padEnd(39)} ║`)
|
||||
console.log(`║ 耗时: ${String(duration + 's').padEnd(39)} ║`)
|
||||
console.log('╚══════════════════════════════════════════════════════════╝')
|
||||
|
||||
// 7. 后续步骤提示
|
||||
|
||||
Reference in New Issue
Block a user