refactor: analysis workflow architecture

fix: stop reading NEXTAUTH_URL directly for internal calls; route through getInternalBaseUrl()

fix: prevent project model edits from affecting default model
This commit is contained in:
saturn
2026-03-16 21:48:57 +08:00
parent ecbd183a77
commit 9aff44e37a
58 changed files with 2753 additions and 7985 deletions

View File

@@ -91,7 +91,11 @@ export function useRebuildConfirm({
try {
const downstream = await checkStoryboardDownstreamData()
if (!downstream.shouldConfirm) {
await action()
try {
await action()
} finally {
setPendingActionType((current) => (current === actionType ? null : current))
}
return
}

View File

@@ -330,33 +330,6 @@ export const PATCH = apiHandler(async (
where: { projectId },
data: updateData})
// 同步更新用户偏好配置(配置字段)
const preferenceFields = [
'analysisModel', 'characterModel', 'locationModel', 'storyboardModel',
'editModel', 'videoModel', 'audioModel', 'videoRatio', 'artStyle', 'ttsRate',
] as const
const preferenceUpdate: Record<string, unknown> = {}
for (const field of preferenceFields) {
if (body[field] !== undefined) {
if ((MODEL_FIELDS as readonly string[]).includes(field)) {
validateModelKeyField(field as typeof MODEL_FIELDS[number], body[field])
}
if (field === 'artStyle') {
preferenceUpdate[field] = validateArtStyleField(body[field])
continue
}
preferenceUpdate[field] = body[field]
}
}
if (Object.keys(preferenceUpdate).length > 0) {
await prisma.userPreference.upsert({
where: { userId: session.user.id },
update: preferenceUpdate,
create: {
userId: session.user.id,
...preferenceUpdate}})
}
const novelPromotionDataWithSignedUrls = await attachMediaFieldsToProject(updatedNovelPromotionData)
const fullProject = {

View File

@@ -1,3 +1,4 @@
import { getInternalBaseUrl } from '@/lib/env'
import { logInfo as _ulogInfo, logError as _ulogError } from '@/lib/logging/core'
/**
* 火山引擎 API 统一调用工具
@@ -265,7 +266,7 @@ async function fetchWithTimeout(
let fullUrl = url
if (url.startsWith('/')) {
// 服务端 fetch 需要完整 URL使用 localhost:3000 作为基础地址
const baseUrl = process.env.NEXTAUTH_URL || 'http://localhost:3000'
const baseUrl = getInternalBaseUrl()
fullUrl = `${baseUrl}${url}`
}

View File

@@ -3,12 +3,26 @@
* 集中管理环境变量的获取,避免到处重复
*/
/**
 * Public-facing application base URL.
 * Falls back to the local dev address when NEXTAUTH_URL is unset or empty.
 */
export function getPublicBaseUrl(): string {
  const configured = process.env.NEXTAUTH_URL
  return configured ? configured : 'http://localhost:3000'
}
/**
* 获取应用 baseUrl
* 用于内部 API 调用、webhook 回调等场景
* 获取应用内部 baseUrl
* 用于容器内自调用、服务端 fetch 本应用 API、拉取本地 /api/files 资源等场景
*/
/**
 * Internal base URL for server-side self-calls (container-internal fetches,
 * serving /api/files assets, loopback API requests).
 * Resolution order: INTERNAL_APP_URL → INTERNAL_TASK_API_BASE_URL →
 * NEXTAUTH_URL → local dev default. Empty strings are skipped.
 */
export function getInternalBaseUrl(): string {
  const candidates = [
    process.env.INTERNAL_APP_URL,
    process.env.INTERNAL_TASK_API_BASE_URL,
    process.env.NEXTAUTH_URL,
  ]
  for (const candidate of candidates) {
    if (candidate) return candidate
  }
  return 'http://localhost:3000'
}
/**
* 向后兼容:当前仓库中 getBaseUrl 主要用于服务端内部调用,因此默认返回内部地址。
*/
/**
 * Backward-compatible alias: existing call sites use getBaseUrl() for
 * server-internal requests, so it delegates to getInternalBaseUrl().
 *
 * Fix: the block contained both the old `return process.env.NEXTAUTH_URL ...`
 * line and the new delegation, leaving the delegation unreachable (and the
 * function with two return statements). Only the delegation is kept.
 */
export function getBaseUrl(): string {
  return getInternalBaseUrl()
}
/**
@@ -16,7 +30,7 @@ export function getBaseUrl(): string {
* @param path API 路径,如 '/api/user/balance'
*/
export function getApiUrl(path: string): string {
const baseUrl = getBaseUrl()
const baseUrl = getInternalBaseUrl()
// 确保 path 以 / 开头
const normalizedPath = path.startsWith('/') ? path : `/${path}`
return `${baseUrl}${normalizedPath}`

View File

@@ -10,6 +10,7 @@
*/
import { GoogleGenAI } from '@google/genai'
import { getInternalBaseUrl } from '@/lib/env'
import { getImageBase64Cached } from './image-cache'
import { logInternal } from './logging/semantic'
@@ -89,7 +90,7 @@ export async function submitGeminiBatch(
// 🔧 本地模式修复:相对路径需要补全完整 URL
let fullUrl = imageData
if (imageData.startsWith('/')) {
const baseUrl = process.env.NEXTAUTH_URL || 'http://localhost:3000'
const baseUrl = getInternalBaseUrl()
fullUrl = `${baseUrl}${imageData}`
}
const base64DataUrl = await getImageBase64Cached(fullUrl)

View File

@@ -1,5 +1,6 @@
import { GoogleGenAI, HarmBlockThreshold, HarmCategory } from '@google/genai'
import { getProviderConfig } from '@/lib/api-config'
import { getInternalBaseUrl } from '@/lib/env'
import { getImageBase64Cached } from '@/lib/image-cache'
import { BaseImageGenerator, type GenerateResult, type ImageGenerateParams } from '../base'
import { setProxy } from '../../../../lib/prompts/proxy'
@@ -16,7 +17,7 @@ type GeminiCompatibleOptions = {
function toAbsoluteUrlIfNeeded(value: string): string {
if (!value.startsWith('/')) return value
const baseUrl = process.env.NEXTAUTH_URL || 'http://localhost:3000'
const baseUrl = getInternalBaseUrl()
return `${baseUrl}${value}`
}

View File

@@ -9,6 +9,7 @@ import { logInfo as _ulogInfo, logWarn as _ulogWarn } from '@/lib/logging/core'
*/
import { GoogleGenAI, HarmCategory, HarmBlockThreshold } from '@google/genai'
import { getInternalBaseUrl } from '@/lib/env'
import { BaseImageGenerator, ImageGenerateParams, GenerateResult } from '../base'
import { getProviderConfig } from '@/lib/api-config'
import { getImageBase64Cached } from '@/lib/image-cache'
@@ -94,7 +95,7 @@ export class GoogleGeminiImageGenerator extends BaseImageGenerator {
// 🔧 本地模式修复:相对路径需要补全完整 URL
let fullUrl = imageData
if (imageData.startsWith('/')) {
const baseUrl = process.env.NEXTAUTH_URL || 'http://localhost:3000'
const baseUrl = getInternalBaseUrl()
fullUrl = `${baseUrl}${imageData}`
}
const base64DataUrl = await getImageBase64Cached(fullUrl)

View File

@@ -36,4 +36,8 @@ export const LLM_OBSERVE_REASONING_VISIBLE = parseBoolean(
true,
)
export const INTERNAL_TASK_TOKEN = process.env.INTERNAL_TASK_TOKEN || ''
// Base URL the task worker uses to call back into the app's internal API.
// Resolution order mirrors getInternalBaseUrl: task-specific override first,
// then the app-internal URL, then the public auth URL, then local default.
// Fix: the old single-line declaration was left alongside the new multi-line
// one, producing a duplicate `const` declaration; only the new form is kept.
export const INTERNAL_TASK_API_BASE_URL =
  process.env.INTERNAL_TASK_API_BASE_URL
  || process.env.INTERNAL_APP_URL
  || process.env.NEXTAUTH_URL
  || 'http://127.0.0.1:3000'

View File

@@ -1,5 +1,6 @@
import OpenAI from 'openai'
import { GoogleGenAI } from '@google/genai'
import { getInternalBaseUrl } from '@/lib/env'
import {
getProviderConfig,
getProviderKey,
@@ -246,7 +247,7 @@ export async function chatCompletionWithVision(
_ulogInfo('[LLM Vision] 转换本地图片为 Base64')
} catch (e) {
_ulogError('[LLM Vision] 转换本地图片失败:', e)
const baseUrl = process.env.NEXTAUTH_URL || 'http://localhost:3000'
const baseUrl = getInternalBaseUrl()
finalUrl = `${baseUrl}${url}`
}
}

View File

@@ -1,11 +1,12 @@
import OpenAI, { toFile } from 'openai'
import { getProviderConfig } from '@/lib/api-config'
import { getInternalBaseUrl } from '@/lib/env'
import { getImageBase64Cached } from '@/lib/image-cache'
import type { OpenAICompatClientConfig } from '../types'
function toAbsoluteUrlIfNeeded(value: string): string {
if (!value.startsWith('/')) return value
const baseUrl = process.env.NEXTAUTH_URL || 'http://localhost:3000'
const baseUrl = getInternalBaseUrl()
return `${baseUrl}${value}`
}

View File

@@ -20,7 +20,7 @@ export interface VoiceLine {
matchedPanelIndex?: number | null
}
// Lifecycle states mirrored from the task runtime; null = no pending task.
// Fix: the pre-change alias (without 'canceled') was left in place next to
// the new one, producing a duplicate type declaration; only the new alias
// (which includes 'canceled') is kept.
export type PendingVoiceTaskStatus = 'queued' | 'processing' | 'completed' | 'failed' | 'canceled' | null
export interface PendingVoiceGenerationState {
submittedUpdatedAt: string | null

View File

@@ -72,7 +72,13 @@ async function fetchTaskStatus(taskId: string): Promise<{
: typeof payload.task?.errorMessage === 'string'
? payload.task.errorMessage
: null
if (status === 'queued' || status === 'processing' || status === 'completed' || status === 'failed') {
if (
status === 'queued'
|| status === 'processing'
|| status === 'completed'
|| status === 'failed'
|| status === 'canceled'
) {
return { status, errorMessage }
}
return { status: null, errorMessage }
@@ -97,7 +103,7 @@ export function useVoiceRuntimeSync({
useEffect(() => {
for (const [lineId, pending] of pendingEntries) {
if (pending.taskStatus !== 'failed') continue
if (pending.taskStatus !== 'failed' && pending.taskStatus !== 'canceled') continue
const failureKey = pending.taskId || lineId
if (reportedFailedTaskIdsRef.current.has(failureKey)) continue
reportedFailedTaskIdsRef.current.add(failureKey)

View File

@@ -186,7 +186,9 @@ export function useTaskStatus(params: {
const data = useMemo(() => {
const tasks = query.data || []
const latest = snapshotQuery.data || tasks[0] || null
const lastFailed = latest?.status === 'failed' ? (latest.error || null) : null
const lastFailed = latest?.status === 'failed' || latest?.status === 'canceled'
? (latest.error || null)
: null
return {
active: tasks,
hasActive: tasks.length > 0,

View File

@@ -1,172 +0,0 @@
import { normalizeAnyError } from '@/lib/errors/normalize'
import { buildLeanState, createCheckpoint, getRunById } from './service'
import type { StateRef } from './types'
type JsonRecord = Record<string, unknown>
export type GraphExecutorState = {
refs: StateRef
meta: JsonRecord
}
export type GraphNodeContext<TState extends GraphExecutorState> = {
runId: string
projectId: string
userId: string
nodeKey: string
attempt: number
state: TState
}
export type GraphNodeResult = {
output?: JsonRecord
checkpointRefs?: StateRef
checkpointMeta?: JsonRecord
}
export type GraphNode<TState extends GraphExecutorState> = {
key: string
title: string
maxAttempts?: number
timeoutMs?: number
run: (context: GraphNodeContext<TState>) => Promise<GraphNodeResult | void>
}
export type GraphExecutorInput<TState extends GraphExecutorState> = {
runId: string
projectId: string
userId: string
state: TState
nodes: GraphNode<TState>[]
}
/** Thrown when a pipeline run was canceled (or vanished) mid-execution. */
export class GraphCancellationError extends Error {
  constructor(message: string = 'run canceled') {
    super(message)
    this.name = 'GraphCancellationError'
  }
}
/** Resolve after roughly `ms` milliseconds (used between retry attempts). */
function wait(ms: number) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
}
/**
 * Race `task` against a rejection timer. A non-positive or non-finite
 * `timeoutMs` disables the timeout entirely and returns the task as-is.
 * The timer is cleared as soon as the task settles either way.
 */
function withTimeout<T>(task: Promise<T>, timeoutMs: number): Promise<T> {
  const disabled = !Number.isFinite(timeoutMs) || timeoutMs <= 0
  if (disabled) {
    return task
  }
  return new Promise<T>((resolve, reject) => {
    const timer = setTimeout(() => {
      reject(new Error(`node timeout after ${Math.floor(timeoutMs)}ms`))
    }, timeoutMs)
    const settle = (finish: () => void) => {
      clearTimeout(timer)
      finish()
    }
    task.then(
      (value) => settle(() => resolve(value)),
      (error: unknown) => settle(() => reject(error)),
    )
  })
}
/** Exponential backoff (1s, 2s, 4s … capped at 10s) plus 0–199ms jitter. */
function computeBackoffMs(attempt: number): number {
  const exponent = Math.max(0, attempt - 1)
  const cappedBase = Math.min(10_000, 1_000 * 2 ** exponent)
  const jitter = Math.floor(Math.random() * 200)
  return cappedBase + jitter
}
/**
 * Ensure the run still exists, belongs to `userId`, and has not entered a
 * cancel state; otherwise abort the pipeline via GraphCancellationError.
 */
async function assertRunActive(runId: string, userId: string): Promise<void> {
  const run = await getRunById(runId)
  if (!run || run.userId !== userId) {
    throw new GraphCancellationError('run not found')
  }
  const status = run.status
  if (status === 'canceling' || status === 'canceled') {
    throw new GraphCancellationError('run canceled')
  }
}
/**
 * Overlay `next` onto `base`, field by field. Falsy fields in `next`
 * (undefined / null / empty string) keep the base value, so a node can
 * report only the refs it actually changed.
 */
function mergeRefs(base: StateRef, next: StateRef | undefined): StateRef {
  if (!next) return base
  const pick = <K extends keyof StateRef>(key: K): StateRef[K] =>
    next[key] || base[key]
  return {
    scriptId: pick('scriptId'),
    storyboardId: pick('storyboardId'),
    voiceLineBatchId: pick('voiceLineBatchId'),
    versionHash: pick('versionHash'),
    cursor: pick('cursor'),
  }
}
/**
 * Runs the given nodes in order against a shared, mutated-in-place state.
 * Per node: confirms the run is still active, executes the node (optionally
 * under a timeout), folds the node's checkpoint refs/meta into the state,
 * persists a checkpoint, then advances. Retryable failures back off with
 * jitter and retry up to the node's maxAttempts; GraphCancellationError is
 * never retried and always propagates to the caller.
 */
export async function executePipelineGraph<TState extends GraphExecutorState>(
input: GraphExecutorInput<TState>,
): Promise<TState> {
const { nodes, runId, projectId, userId, state } = input
for (const node of nodes) {
// Clamp maxAttempts to an integer >= 1; non-finite values fall back to 1.
const maxAttempts = Number.isFinite(node.maxAttempts || 1)
? Math.max(1, Math.floor(node.maxAttempts || 1))
: 1
let attempt = 1
while (attempt <= maxAttempts) {
// Abort between attempts if the run was canceled or changed owner.
await assertRunActive(runId, userId)
try {
const result = await withTimeout(
node.run({
runId,
projectId,
userId,
nodeKey: node.key,
attempt,
state,
}),
node.timeoutMs || 0, // 0 disables the timeout (see withTimeout)
)
// Fold the node's checkpoint data into the shared state before persisting.
state.refs = mergeRefs(state.refs, result?.checkpointRefs)
if (result?.checkpointMeta) {
state.meta = {
...state.meta,
...result.checkpointMeta,
}
}
// Persist a checkpoint per successful attempt; version = attempt number.
await createCheckpoint({
runId,
nodeKey: node.key,
version: attempt,
state: buildLeanState({
refs: state.refs,
meta: {
...state.meta,
nodeTitle: node.title,
nodeAttempt: attempt,
...(result?.output ? { output: result.output } : {}),
},
}),
})
break
} catch (error) {
// Cancellation is terminal: never retried, surfaced immediately.
if (error instanceof GraphCancellationError) {
throw error
}
const normalized = normalizeAnyError(error, { context: 'worker' })
const shouldRetry = normalized.retryable && attempt < maxAttempts
if (!shouldRetry) {
throw error
}
// Exponential backoff with jitter before the next attempt.
await wait(computeBackoffMs(attempt))
attempt += 1
}
}
}
return state
}

View File

@@ -1,89 +0,0 @@
import { Annotation, END, START, StateGraph } from '@langchain/langgraph'
import {
executePipelineGraph,
type GraphExecutorInput,
type GraphExecutorState,
} from './graph-executor'
/** Reject node lists that reuse a key — graph node names must be unique. */
function assertUniqueNodeKeys<TState extends GraphExecutorState>(input: GraphExecutorInput<TState>) {
  const seen = new Set<string>()
  input.nodes.forEach((node) => {
    if (seen.has(node.key)) {
      throw new Error(`LANGGRAPH_NODE_KEY_DUPLICATE: ${node.key}`)
    }
    seen.add(node.key)
  })
}
/**
 * Wrap the entire pipeline state in a single LangGraph channel named
 * `pipelineState`. The reducer replaces the previous value wholesale
 * (last write wins); the default seeds the channel with the caller's
 * initial state object.
 */
function createStateAnnotation<TState extends GraphExecutorState>(initialState: TState) {
return Annotation.Root({
pipelineState: Annotation<unknown>({
reducer: (_current, update) => update,
default: () => initialState,
}),
})
}
/**
 * Extract the typed pipeline state out of a raw LangGraph channel value,
 * throwing LANGGRAPH_STATE_INVALID errors when the shape is wrong.
 */
function readPipelineState<TState extends GraphExecutorState>(value: unknown): TState {
  const isObjectLike = (x: unknown): x is Record<string, unknown> =>
    Boolean(x) && typeof x === 'object'
  if (!isObjectLike(value)) {
    throw new Error('LANGGRAPH_STATE_INVALID: state object missing')
  }
  const pipelineState = value.pipelineState
  if (!isObjectLike(pipelineState)) {
    throw new Error('LANGGRAPH_STATE_INVALID: pipelineState missing')
  }
  return pipelineState as TState
}
/**
 * Call addEdge on the builder through a structural cast. StateGraph's
 * typings reject dynamically-named nodes, so the edge wiring is done
 * through this deliberately untyped shim.
 */
function addEdgeUnsafe(
  graphBuilder: unknown,
  source: string,
  target: string,
) {
  type EdgeCapable = {
    addEdge: (nextSource: string, nextTarget: string) => unknown
  }
  const writable = graphBuilder as EdgeCapable
  writable.addEdge(source, target)
}
/**
 * Build and run a linear LangGraph over the supplied nodes.
 * Each node becomes a graph vertex that delegates to executePipelineGraph
 * (so retries/backoff/checkpoints come from the executor, not LangGraph),
 * edges chain the nodes in array order from START to END, and the final
 * channel value is unwrapped back into the caller's state type.
 * An empty node list short-circuits and returns the input state unchanged.
 */
export async function runLangGraphPipeline<TState extends GraphExecutorState>(
input: GraphExecutorInput<TState>,
): Promise<TState> {
if (input.nodes.length === 0) {
return input.state
}
assertUniqueNodeKeys(input)
const stateAnnotation = createStateAnnotation(input.state)
const graphBuilder = new StateGraph(stateAnnotation)
for (const node of input.nodes) {
graphBuilder.addNode(node.key, async (state: { pipelineState: unknown }) => {
// Re-validate the channel payload, then run just this one node through
// the executor so its retry/checkpoint semantics apply.
const pipelineState = readPipelineState<TState>({ pipelineState: state.pipelineState })
await executePipelineGraph({
runId: input.runId,
projectId: input.projectId,
userId: input.userId,
state: pipelineState,
nodes: [node],
})
return {
pipelineState,
}
})
}
// Wire START -> first node, then each node to its successor (or END).
addEdgeUnsafe(graphBuilder, START, input.nodes[0].key)
for (let index = 0; index < input.nodes.length; index += 1) {
const currentKey = input.nodes[index].key
const nextKey = input.nodes[index + 1]?.key
addEdgeUnsafe(graphBuilder, currentKey, nextKey || END)
}
const compiled = graphBuilder.compile()
const result = await compiled.invoke({
pipelineState: input.state,
})
return readPipelineState<TState>(result)
}

View File

@@ -1,16 +0,0 @@
import {
type GraphExecutorInput,
type GraphExecutorState,
type GraphNode,
} from './graph-executor'
import { runLangGraphPipeline } from './langgraph-pipeline'
export type PipelineGraphState = GraphExecutorState
export type PipelineGraphNode<TState extends PipelineGraphState> = GraphNode<TState>
export type PipelineGraphInput<TState extends PipelineGraphState> = GraphExecutorInput<TState>
/**
 * Execute a workflow graph. Thin facade so callers depend on this module's
 * stable surface rather than on the LangGraph-backed implementation.
 */
export async function runPipelineGraph<TState extends PipelineGraphState>(
  input: PipelineGraphInput<TState>,
): Promise<TState> {
  const finalState = await runLangGraphPipeline(input)
  return finalState
}

View File

@@ -1,31 +0,0 @@
import { runPipelineGraph, type PipelineGraphNode, type PipelineGraphState } from './pipeline-graph'
type QuickRunInput<TState extends PipelineGraphState> = {
runId: string
projectId: string
userId: string
nodeKey: string
nodeTitle: string
state: TState
run: PipelineGraphNode<TState>['run']
maxAttempts?: number
timeoutMs?: number
}
/**
 * Run a single-node graph — convenience wrapper for one-shot workflows
 * that still want run tracking, retries, and checkpoints.
 */
export async function runQuickRunGraph<TState extends PipelineGraphState>(input: QuickRunInput<TState>) {
  const soleNode = {
    key: input.nodeKey,
    title: input.nodeTitle,
    maxAttempts: input.maxAttempts,
    timeoutMs: input.timeoutMs,
    run: input.run,
  }
  return await runPipelineGraph({
    runId: input.runId,
    projectId: input.projectId,
    userId: input.userId,
    state: input.state,
    nodes: [soleNode],
  })
}

View File

@@ -1,4 +1,5 @@
import { prisma } from '@/lib/prisma'
import { resolveRetryInvalidationStepKeys } from '@/lib/workflow-engine/dependencies'
import {
RUN_EVENT_TYPE,
RUN_STATE_MAX_BYTES,
@@ -30,6 +31,10 @@ type GraphRunRow = {
errorCode: string | null
errorMessage: string | null
cancelRequestedAt: Date | null
leaseOwner: string | null
leaseExpiresAt: Date | null
heartbeatAt: Date | null
workflowVersion: number
queuedAt: Date
startedAt: Date | null
finishedAt: Date | null
@@ -121,6 +126,7 @@ type GraphCheckpointModel = {
type GraphArtifactModel = {
upsert: (args: unknown) => Promise<GraphArtifactRow>
findMany: (args: unknown) => Promise<GraphArtifactRow[]>
deleteMany: (args: unknown) => Promise<{ count: number }>
}
type GraphRuntimeTx = {
@@ -226,6 +232,10 @@ function mapRunRow(run: GraphRunRow) {
errorCode: run.errorCode,
errorMessage: run.errorMessage,
cancelRequestedAt: toIso(run.cancelRequestedAt),
leaseOwner: run.leaseOwner,
leaseExpiresAt: toIso(run.leaseExpiresAt),
heartbeatAt: toIso(run.heartbeatAt),
workflowVersion: run.workflowVersion,
queuedAt: run.queuedAt.toISOString(),
startedAt: toIso(run.startedAt),
finishedAt: toIso(run.finishedAt),
@@ -457,6 +467,8 @@ async function applyRunProjection(tx: GraphRuntimeTx, input: RunEventInput) {
status: RUN_STATUS.COMPLETED,
output: payload,
finishedAt: now,
leaseOwner: null,
leaseExpiresAt: null,
},
})
await tx.graphStep.updateMany({
@@ -482,6 +494,8 @@ async function applyRunProjection(tx: GraphRuntimeTx, input: RunEventInput) {
errorCode: readString(payload, 'errorCode'),
errorMessage: resolveErrorMessage(payload),
finishedAt: now,
leaseOwner: null,
leaseExpiresAt: null,
},
})
await tx.graphStep.updateMany({
@@ -507,6 +521,8 @@ async function applyRunProjection(tx: GraphRuntimeTx, input: RunEventInput) {
data: {
status: RUN_STATUS.CANCELED,
finishedAt: now,
leaseOwner: null,
leaseExpiresAt: null,
},
})
await tx.graphStep.updateMany({
@@ -651,6 +667,10 @@ export async function createRun(input: CreateRunInput) {
targetId: input.targetId,
status: RUN_STATUS.QUEUED,
input: input.input || null,
leaseOwner: null,
leaseExpiresAt: null,
heartbeatAt: null,
workflowVersion: 1,
queuedAt: new Date(),
lastSeq: 0,
},
@@ -676,6 +696,112 @@ export async function getRunById(runId: string) {
return mapRunRow(row)
}
/**
 * Find the most recently updated run for the same user/project/workflow/
 * target that is still queued, running, or canceling, so a duplicate submit
 * can attach to it instead of spawning a second run. Returns null when no
 * such run exists.
 */
export async function findReusableActiveRun(params: {
  userId: string
  projectId: string
  workflowType: string
  targetType: string
  targetId: string
}) {
  const [candidate] = await runtimeClient.graphRun.findMany({
    where: {
      userId: params.userId,
      projectId: params.projectId,
      workflowType: params.workflowType,
      targetType: params.targetType,
      targetId: params.targetId,
      status: {
        in: [RUN_STATUS.QUEUED, RUN_STATUS.RUNNING, RUN_STATUS.CANCELING],
      },
    },
    orderBy: [
      { updatedAt: 'desc' },
      { createdAt: 'desc' },
    ],
    take: 1,
  })
  return candidate ? mapRunRow(candidate) : null
}
/**
 * Atomically claim (or re-claim) the worker lease on a run via a guarded
 * updateMany: the write only lands when the run is still active AND the
 * lease is free, already ours, or expired. Returns the refreshed run on
 * success, or null when another worker holds a live lease.
 */
export async function claimRunLease(params: {
runId: string
userId: string
workerId: string
leaseMs: number
}) {
const now = new Date()
// Enforce a 5s lease floor so aggressive callers can't thrash ownership.
const leaseExpiresAt = new Date(now.getTime() + Math.max(5_000, Math.floor(params.leaseMs)))
const result = await runtimeClient.graphRun.updateMany({
where: {
id: params.runId,
userId: params.userId,
status: {
in: [RUN_STATUS.QUEUED, RUN_STATUS.RUNNING, RUN_STATUS.CANCELING],
},
// Claimable when: unowned, owned by us (renew path), never leased, or the
// previous owner's lease has lapsed.
OR: [
{ leaseOwner: null },
{ leaseOwner: params.workerId },
{ leaseExpiresAt: null },
{ leaseExpiresAt: { lt: now } },
],
},
data: {
leaseOwner: params.workerId,
leaseExpiresAt,
heartbeatAt: now,
},
})
// count === 0 means the guarded update matched nothing: lease held elsewhere
// or the run already reached a terminal status.
if (result.count === 0) {
return null
}
return await getRunById(params.runId)
}
/**
 * Heartbeat: extend our own lease on an active run. Unlike claimRunLease,
 * the where clause requires leaseOwner to already be this worker, so a
 * lease lost to another worker is NOT re-taken here. Returns the refreshed
 * run, or null when the renewal matched nothing (lease lost or run ended).
 */
export async function renewRunLease(params: {
runId: string
userId: string
workerId: string
leaseMs: number
}) {
const now = new Date()
// Same 5s floor as claimRunLease to keep lease windows sane.
const leaseExpiresAt = new Date(now.getTime() + Math.max(5_000, Math.floor(params.leaseMs)))
const result = await runtimeClient.graphRun.updateMany({
where: {
id: params.runId,
userId: params.userId,
leaseOwner: params.workerId,
status: {
in: [RUN_STATUS.QUEUED, RUN_STATUS.RUNNING, RUN_STATUS.CANCELING],
},
},
data: {
leaseExpiresAt,
heartbeatAt: now,
},
})
if (result.count === 0) {
return null
}
return await getRunById(params.runId)
}
/**
 * Drop this worker's lease on a run. The leaseOwner guard in the where
 * clause makes the release a no-op if another worker has since taken over.
 */
export async function releaseRunLease(params: {
  runId: string
  workerId: string
}) {
  const where = {
    id: params.runId,
    leaseOwner: params.workerId,
  }
  const data = {
    leaseOwner: null,
    leaseExpiresAt: null,
  }
  await runtimeClient.graphRun.updateMany({ where, data })
}
export async function getRunSnapshot(runId: string) {
const [run, steps] = await Promise.all([
runtimeClient.graphRun.findUnique({
@@ -977,8 +1103,21 @@ export async function retryFailedStep(params: {
throw new Error('RUN_STEP_NOT_FAILED')
}
const steps = await tx.graphStep.findMany({
where: { runId: params.runId },
orderBy: [
{ stepIndex: 'asc' },
{ updatedAt: 'asc' },
],
})
const now = new Date()
const nextAttempt = Math.max(1, step.currentAttempt + 1)
const invalidatedStepKeys = resolveRetryInvalidationStepKeys({
workflowType: run.workflowType,
stepKey,
existingStepKeys: steps.map((item) => item.stepKey),
})
const updatedRun = await tx.graphRun.update({
where: { id: params.runId },
data: {
@@ -990,6 +1129,20 @@ export async function retryFailedStep(params: {
startedAt: run.startedAt || now,
},
})
await tx.graphStep.updateMany({
where: {
runId: params.runId,
stepKey: { in: invalidatedStepKeys },
},
data: {
status: RUN_STEP_STATUS.PENDING,
currentAttempt: 0,
startedAt: null,
finishedAt: null,
lastErrorCode: null,
lastErrorMessage: null,
},
})
const updatedStep = await tx.graphStep.update({
where: {
runId_stepKey: {
@@ -998,12 +1151,13 @@ export async function retryFailedStep(params: {
},
},
data: {
status: RUN_STEP_STATUS.PENDING,
currentAttempt: nextAttempt,
startedAt: now,
finishedAt: null,
lastErrorCode: null,
lastErrorMessage: null,
},
})
await tx.graphArtifact.deleteMany({
where: {
runId: params.runId,
stepKey: { in: invalidatedStepKeys },
},
})
@@ -1011,6 +1165,7 @@ export async function retryFailedStep(params: {
run: mapRunRow(updatedRun),
step: mapStepRow(updatedStep),
retryAttempt: nextAttempt,
invalidatedStepKeys,
}
})
}

View File

@@ -69,6 +69,13 @@ export type CreateRunInput = {
input?: Record<string, unknown> | null
}
export type RunLeaseState = {
leaseOwner?: string | null
leaseExpiresAt?: string | null
heartbeatAt?: string | null
workflowVersion?: number
}
export type ListRunsInput = {
userId: string
projectId?: string

View File

@@ -0,0 +1,72 @@
import { TaskTerminatedError } from '@/lib/task/errors'
import { RUN_STATUS } from './types'
import { claimRunLease, getRunById, releaseRunLease, renewRunLease } from './service'
// How long a worker owns a run before its lease must be renewed.
const DEFAULT_RUN_LEASE_MS = 30_000

/** Default run-lease duration in milliseconds. */
export function getDefaultRunLeaseMs(): number {
  return DEFAULT_RUN_LEASE_MS
}
/**
 * Guard used between workflow steps: throws TaskTerminatedError when the
 * run has disappeared, when this worker's lease was lost to another worker,
 * or when the run has reached a canceling/terminal status.
 */
export async function assertWorkflowRunActive(params: {
  runId: string
  workerId: string
  stage: string
}) {
  const run = await getRunById(params.runId)
  if (!run) {
    throw new TaskTerminatedError(params.runId, `Run terminated during ${params.stage}: run not found`)
  }
  if (run.leaseOwner !== params.workerId) {
    throw new TaskTerminatedError(params.runId, `Run terminated during ${params.stage}: lease lost`)
  }
  const inactiveStatuses = new Set<string>([
    RUN_STATUS.CANCELING,
    RUN_STATUS.CANCELED,
    RUN_STATUS.COMPLETED,
    RUN_STATUS.FAILED,
  ])
  if (inactiveStatuses.has(run.status)) {
    throw new TaskTerminatedError(params.runId, `Run terminated during ${params.stage}`)
  }
}
/**
 * Claim the run lease, keep it alive with a periodic heartbeat while `run`
 * executes, and always release it afterwards.
 *
 * Returns { claimed: false, result: null } when another worker holds a live
 * lease; otherwise { claimed: true, result } with the callback's result.
 *
 * Fix: the heartbeat previously fired `void renewRunLease(...)` with no
 * rejection handler inside setInterval — a transient DB error there becomes
 * an unhandled promise rejection, which crashes the Node process under
 * default settings. The renewal is now caught; a missed heartbeat is simply
 * retried on the next interval tick.
 */
export async function withWorkflowRunLease<T>(params: {
  runId: string
  userId: string
  workerId: string
  leaseMs?: number
  run: () => Promise<T>
}): Promise<{ claimed: boolean; result: T | null }> {
  const leaseMs = params.leaseMs ?? DEFAULT_RUN_LEASE_MS
  const claimed = await claimRunLease({
    runId: params.runId,
    userId: params.userId,
    workerId: params.workerId,
    leaseMs,
  })
  if (!claimed) {
    return { claimed: false, result: null }
  }
  // Renew at ~1/3 of the lease window, but never more often than every 5s.
  const heartbeatTimer = setInterval(() => {
    renewRunLease({
      runId: params.runId,
      userId: params.userId,
      workerId: params.workerId,
      leaseMs,
    }).catch(() => {
      // Best-effort heartbeat: failures are retried on the next tick; if the
      // lease truly expires, assertWorkflowRunActive aborts the run instead.
    })
  }, Math.max(5_000, Math.floor(leaseMs / 3)))
  try {
    return {
      claimed: true,
      result: await params.run(),
    }
  } finally {
    clearInterval(heartbeatTimer)
    await releaseRunLease({
      runId: params.runId,
      workerId: params.workerId,
    })
  }
}

View File

@@ -1,9 +1,10 @@
import { StorageConfigError } from './errors'
import { getInternalBaseUrl } from '@/lib/env'
export const DEFAULT_SIGNED_URL_EXPIRES_SECONDS = 24 * 60 * 60
export function resolveBaseUrl(): string {
return process.env.NEXTAUTH_URL || 'http://localhost:3000'
return getInternalBaseUrl()
}
export function toFetchableUrl(inputUrl: string): string {

View File

@@ -1,7 +1,7 @@
import { resolveTaskErrorMessage } from './error-message'
import { apiFetch } from '@/lib/api-fetch'
// In-flight and terminal task states; 'canceled' covers user-initiated stops.
// Fix: the pre-change alias (without 'canceled') was left alongside the new
// one, producing a duplicate type declaration; only the new alias is kept.
type TaskStatus = 'queued' | 'processing' | 'completed' | 'failed' | 'canceled'
type TaskSnapshot = {
id: string
@@ -63,7 +63,7 @@ export async function waitForTaskResult(taskId: string, options: WaitTaskOptions
if (task.status === 'completed') {
return task.result || { success: true }
}
if (task.status === 'failed') {
if (task.status === 'failed' || task.status === 'canceled') {
throw new Error(resolveTaskErrorMessage(task, `Task ${task.status}`))
}
if (task.status !== 'queued' && task.status !== 'processing') {

View File

@@ -3,6 +3,7 @@ import { redis } from '@/lib/redis'
import {
TASK_EVENT_TYPE,
TASK_SSE_EVENT_TYPE,
TASK_TYPE,
type TaskEventType,
type TaskLifecycleEventType,
type SSEEvent,
@@ -13,6 +14,10 @@ import { publishRunEvent } from '@/lib/run-runtime/publisher'
const CHANNEL_PREFIX = 'task-events:project:'
const STREAM_EPHEMERAL_ENABLED = process.env.LLM_STREAM_EPHEMERAL_ENABLED !== 'false'
const TASK_TYPES_WITH_DIRECT_RUN_EVENTS = new Set<string>([
TASK_TYPE.STORY_TO_SCRIPT_RUN,
TASK_TYPE.SCRIPT_TO_STORYBOARD_RUN,
])
type TaskEventRow = {
id: number
@@ -221,6 +226,9 @@ export function getProjectChannel(projectId: string) {
}
async function mirrorTaskEventToRun(message: SSEEvent) {
if (message.taskType && TASK_TYPES_WITH_DIRECT_RUN_EVENTS.has(message.taskType)) {
return
}
const runEvents = mapTaskSSEEventToRunEvents(message)
if (runEvents.length === 0) return
for (const event of runEvents) {

View File

@@ -59,6 +59,11 @@ const VOICE_TYPES = new Set<TaskType>([
TASK_TYPE.ASSET_HUB_VOICE_DESIGN,
])
const SINGLE_ATTEMPT_TASK_TYPES = new Set<TaskType>([
TASK_TYPE.STORY_TO_SCRIPT_RUN,
TASK_TYPE.SCRIPT_TO_STORYBOARD_RUN,
])
export function getQueueTypeByTaskType(type: TaskType): QueueType {
if (IMAGE_TYPES.has(type)) return 'image'
if (VIDEO_TYPES.has(type)) return 'video'
@@ -84,10 +89,14 @@ export async function addTaskJob(data: TaskJobData, opts?: JobsOptions) {
const queueType = getQueueTypeByTaskType(data.type)
const queue = getQueueByType(queueType)
const priority = typeof opts?.priority === 'number' ? opts.priority : 0
const attempts = SINGLE_ATTEMPT_TASK_TYPES.has(data.type)
? 1
: (typeof opts?.attempts === 'number' ? opts.attempts : undefined)
return await queue.add(data.type, data, {
jobId: data.taskId,
priority,
...(opts || {}),
...(attempts !== undefined ? { attempts } : {}),
})
}

View File

@@ -469,6 +469,20 @@ export async function tryMarkTaskFailed(taskId: string, errorCode: string, error
return result.count > 0
}
/**
 * Transition a still-active task to CANCELED, truncating the error fields
 * to their storage limits (80 / 2000 chars). Returns true only when a row
 * was actually updated — i.e. the task had not already reached a terminal
 * state (the activeTaskWhere guard makes this race-safe).
 */
export async function tryMarkTaskCanceled(taskId: string, errorCode: string, errorMessage: string) {
  const cancelData = {
    status: TASK_STATUS.CANCELED,
    errorCode: errorCode.slice(0, 80),
    errorMessage: errorMessage.slice(0, 2000),
    finishedAt: new Date(),
    heartbeatAt: null,
  }
  const { count } = await taskModel.updateMany({
    where: activeTaskWhere(taskId),
    data: cancelData,
  })
  return count > 0
}
export async function markTaskProcessing(taskId: string, externalId?: string | null) {
return await tryMarkTaskProcessing(taskId, externalId)
}
@@ -485,6 +499,10 @@ export async function markTaskFailed(taskId: string, errorCode: string, errorMes
return await tryMarkTaskFailed(taskId, errorCode, errorMessage)
}
/** Alias of tryMarkTaskCanceled, kept for naming parity with markTaskFailed. */
export async function markTaskCanceled(taskId: string, errorCode: string, errorMessage: string) {
  const canceled = await tryMarkTaskCanceled(taskId, errorCode, errorMessage)
  return canceled
}
export async function cancelTask(taskId: string, reason = 'Task cancelled by user') {
const snapshot = await taskModel.findUnique({
where: { id: taskId },
@@ -514,7 +532,7 @@ export async function cancelTask(taskId: string, reason = 'Task cancelled by use
}
const failure = resolveCompensationFailure(rollbackResult, 'TASK_CANCELLED', reason)
const cancelled = await tryMarkTaskFailed(taskId, failure.errorCode, failure.errorMessage)
const cancelled = await tryMarkTaskCanceled(taskId, failure.errorCode, failure.errorMessage)
const task = await taskModel.findUnique({ where: { id: taskId } })
return {
task,

View File

@@ -125,7 +125,7 @@ export function resolveTargetState(
const running = filtered.find((task) => ACTIVE_STATUS.has(task.status)) || null
const terminal = filtered.find((task) =>
task.status === 'completed' || task.status === 'failed'
task.status === 'completed' || task.status === 'failed' || task.status === 'canceled'
) || null
const latest = running || terminal
@@ -240,7 +240,7 @@ export async function queryTaskTargetStates(params: {
targetId: item.targetId,
})),
status: {
in: ['queued', 'processing', 'completed', 'failed'],
in: ['queued', 'processing', 'completed', 'failed', 'canceled'],
},
...typeFilter,
},

View File

@@ -3,6 +3,7 @@ import { addTaskJob } from './queues'
import { publishTaskEvent } from './publisher'
import {
createTask,
getTaskById,
markTaskEnqueueFailed,
markTaskEnqueued,
markTaskFailed,
@@ -10,7 +11,7 @@ import {
updateTaskBillingInfo,
updateTaskPayload,
} from './service'
import { TASK_EVENT_TYPE, type TaskBillingInfo, type TaskType } from './types'
import { TASK_EVENT_TYPE, TASK_STATUS, TASK_TYPE, type TaskBillingInfo, type TaskType } from './types'
import {
buildDefaultTaskBillingInfo,
getBillingMode,
@@ -21,9 +22,18 @@ import {
import { ApiError } from '@/lib/api-errors'
import { getTaskFlowMeta } from '@/lib/llm-observe/stage-pipeline'
import type { Locale } from '@/i18n/routing'
import { attachTaskToRun, createRun } from '@/lib/run-runtime/service'
import { attachTaskToRun, createRun, findReusableActiveRun } from '@/lib/run-runtime/service'
import { isAiTaskType, workflowTypeFromTaskType } from '@/lib/run-runtime/workflow'
const RUN_CENTRIC_TASK_TYPES = new Set<TaskType>([
TASK_TYPE.STORY_TO_SCRIPT_RUN,
TASK_TYPE.SCRIPT_TO_STORYBOARD_RUN,
])
function isRunCentricTaskType(type: TaskType): boolean {
return RUN_CENTRIC_TASK_TYPES.has(type)
}
export function toObject(value: unknown): Record<string, unknown> {
if (!value || typeof value !== 'object' || Array.isArray(value)) return {}
return value as Record<string, unknown>
@@ -125,6 +135,34 @@ export async function submitTask(params: {
? buildDefaultTaskBillingInfo(params.type, normalizedPayload)
: null
const resolvedBillingInfo = computedBillingInfo || params.billingInfo || null
const runCentricTask = isRunCentricTaskType(params.type)
const workflowType = workflowTypeFromTaskType(params.type)
const reusableRun = runCentricTask
? await findReusableActiveRun({
userId: params.userId,
projectId: params.projectId,
workflowType,
targetType: params.targetType,
targetId: params.targetId,
})
: null
if (runCentricTask && reusableRun?.taskId) {
const existingTask = await getTaskById(reusableRun.taskId)
if (
existingTask
&& (existingTask.status === TASK_STATUS.QUEUED || existingTask.status === TASK_STATUS.PROCESSING)
) {
return {
success: true,
async: true,
taskId: existingTask.id,
runId: reusableRun.id,
status: existingTask.status,
deduped: true as const,
}
}
}
const { task, deduped } = await createTask({
userId: params.userId,
@@ -134,18 +172,29 @@ export async function submitTask(params: {
targetType: params.targetType,
targetId: params.targetId,
payload: normalizedPayload,
dedupeKey: params.dedupeKey || null,
dedupeKey: runCentricTask ? null : (params.dedupeKey || null),
priority: params.priority,
maxAttempts: params.maxAttempts,
billingInfo: resolvedBillingInfo || null,
})
let runId = resolveRunIdFromPayload(task.payload)
if (!deduped && isAiTaskType(params.type) && !runId) {
let runId = reusableRun?.id || resolveRunIdFromPayload(task.payload)
if (!deduped && reusableRun && runId) {
const payloadWithRunId = {
...normalizedPayload,
runId,
meta: {
...toObject(normalizedPayload.meta),
runId,
},
}
await updateTaskPayload(task.id, payloadWithRunId)
await attachTaskToRun(runId, task.id)
} else if (!deduped && isAiTaskType(params.type) && !runId) {
const run = await createRun({
userId: params.userId,
projectId: params.projectId,
episodeId: params.episodeId || null,
workflowType: workflowTypeFromTaskType(params.type),
workflowType,
taskType: params.type,
taskId: task.id,
targetType: params.targetType,

View File

@@ -5,6 +5,7 @@ export const TASK_STATUS = {
PROCESSING: 'processing',
COMPLETED: 'completed',
FAILED: 'failed',
CANCELED: 'canceled',
DISMISSED: 'dismissed',
} as const

View File

@@ -16,6 +16,11 @@ export type WorkerInternalLLMStreamCallbacks = InternalLLMStreamCallbacks & {
flush: () => Promise<void>
}
/**
 * Optional overrides for the liveness checks used while streaming LLM output
 * from a worker. When provided, these replace the default task-status probes
 * (assertTaskActive / isTaskActive) — e.g. so callers can check a workflow
 * run lease instead of the raw task record.
 */
export type WorkerLLMActiveController = {
  // Throws when the run is no longer active; `stage` labels the call site for diagnostics.
  assertActive?: (stage: string) => Promise<void>
  // Non-throwing probe; resolves false when streaming should be terminated.
  isActive?: () => Promise<boolean>
}
export function createWorkerLLMStreamContext(job: Job<TaskJobData>, label = 'worker'): WorkerLLMStreamContext {
return {
streamRunId: `run:${job.data.taskId}:${label}:${Date.now().toString(36)}:${Math.random().toString(36).slice(2, 8)}`,
@@ -33,6 +38,7 @@ function nextWorkerStreamSeq(streamContext: WorkerLLMStreamContext, stepId: stri
export function createWorkerLLMStreamCallbacks(
job: Job<TaskJobData>,
streamContext: WorkerLLMStreamContext,
activeController?: WorkerLLMActiveController,
): WorkerInternalLLMStreamCallbacks {
const maxChunkChars = 128
const activeProbeIntervalMs = 600
@@ -54,13 +60,28 @@ export function createWorkerLLMStreamCallbacks(
if (terminatedError) throw terminatedError
}
const assertActive = async (stage: string) => {
if (activeController?.assertActive) {
await activeController.assertActive(stage)
return
}
await assertTaskActive(job, stage)
}
const probeActive = async () => {
if (activeController?.isActive) {
return await activeController.isActive()
}
return await isTaskActive(job.data.taskId)
}
const scheduleActiveProbe = () => {
if (terminatedError || checkingActive) return
const now = Date.now()
if (now - lastActiveProbeAt < activeProbeIntervalMs) return
checkingActive = true
lastActiveProbeAt = now
void isTaskActive(job.data.taskId)
void probeActive()
.then((active) => {
if (!active) {
markTerminated('worker_llm_stream_probe')
@@ -78,7 +99,7 @@ export function createWorkerLLMStreamCallbacks(
.catch(() => undefined)
.then(async () => {
ensureActiveOrThrow(stage)
await assertTaskActive(job, stage)
await assertActive(stage)
await work()
})
.catch((error) => {

View File

@@ -11,12 +11,13 @@ import { onProjectNameAvailable } from '@/lib/logging/file-writer'
import { buildCharactersIntroduction } from '@/lib/constants'
import { TaskTerminatedError } from '@/lib/task/errors'
import { reportTaskProgress } from '@/lib/workers/shared'
import { assertTaskActive } from '@/lib/workers/utils'
import {
JsonParseError,
runScriptToStoryboardOrchestrator,
type ScriptToStoryboardStepMeta,
type ScriptToStoryboardStepOutput,
type ScriptToStoryboardOrchestratorResult,
} from '@/lib/novel-promotion/script-to-storyboard/orchestrator'
import { runScriptToStoryboardGraph } from '@/lib/workflows/script-to-storyboard/graph'
import { createWorkerLLMStreamCallbacks, createWorkerLLMStreamContext } from './llm-stream'
import type { TaskJobData } from '@/lib/task/types'
import {
@@ -32,6 +33,7 @@ import {
import { buildPrompt, getPromptTemplate, PROMPT_IDS } from '@/lib/prompt-i18n'
import { resolveAnalysisModel } from './resolve-analysis-model'
import { createArtifact } from '@/lib/run-runtime/service'
import { assertWorkflowRunActive, withWorkflowRunLease } from '@/lib/run-runtime/workflow-lease'
import {
parseStoryboardRetryTarget,
runScriptToStoryboardAtomicRetry,
@@ -40,6 +42,10 @@ import {
type AnyObj = Record<string, unknown>
const MAX_VOICE_ANALYZE_ATTEMPTS = 2
/**
 * Build the stable worker identity used for workflow-run leasing,
 * formatted as "<label>:<queueName>:<taskId>".
 */
function buildWorkflowWorkerId(job: Job<TaskJobData>, label: string) {
  const segments = [label, job.queueName, job.data.taskId]
  return segments.join(':')
}
/** Type guard for the four supported LLM reasoning-effort levels. */
function isReasoningEffort(value: unknown): value is 'minimal' | 'low' | 'medium' | 'high' {
  switch (value) {
    case 'minimal':
    case 'low':
    case 'medium':
    case 'high':
      return true
    default:
      return false
  }
}
@@ -135,19 +141,44 @@ export async function handleScriptToStoryboardTask(job: Job<TaskJobData>) {
const reasoningEffort = requestedReasoningEffort
|| (isReasoningEffort(capabilityReasoningEffort) ? capabilityReasoningEffort : 'high')
await reportTaskProgress(job, 10, {
stage: 'script_to_storyboard_prepare',
stageLabel: 'progress.stage.scriptToStoryboardPrepare',
displayMode: 'detail',
})
const phase1PlanTemplate = getPromptTemplate(PROMPT_IDS.NP_AGENT_STORYBOARD_PLAN, job.data.locale)
const phase2CinematographyTemplate = getPromptTemplate(PROMPT_IDS.NP_AGENT_CINEMATOGRAPHER, job.data.locale)
const phase2ActingTemplate = getPromptTemplate(PROMPT_IDS.NP_AGENT_ACTING_DIRECTION, job.data.locale)
const phase3DetailTemplate = getPromptTemplate(PROMPT_IDS.NP_AGENT_STORYBOARD_DETAIL, job.data.locale)
const payloadMeta = typeof payload.meta === 'object' && payload.meta !== null
? (payload.meta as AnyObj)
: {}
const runId = typeof payload.runId === 'string' && payload.runId.trim()
? payload.runId.trim()
: (typeof payloadMeta.runId === 'string' ? payloadMeta.runId.trim() : '')
if (!runId) {
throw new Error('runId is required for script_to_storyboard pipeline')
}
const workerId = buildWorkflowWorkerId(job, 'script_to_storyboard')
const assertRunActive = async (stage: string) => {
await assertWorkflowRunActive({
runId,
workerId,
stage,
})
}
const streamContext = createWorkerLLMStreamContext(job, 'script_to_storyboard')
const callbacks = createWorkerLLMStreamCallbacks(job, streamContext)
const callbacks = createWorkerLLMStreamCallbacks(job, streamContext, {
assertActive: async (stage) => {
await assertRunActive(stage)
},
isActive: async () => {
try {
await assertRunActive('worker_llm_stream_probe')
return true
} catch (error) {
if (error instanceof TaskTerminatedError) {
return false
}
throw error
}
},
})
const runStep = async (
meta: ScriptToStoryboardStepMeta,
@@ -158,7 +189,7 @@ export async function handleScriptToStoryboardTask(job: Job<TaskJobData>) {
void _maxOutputTokens
const stepAttempt = meta.stepAttempt
|| (retryStepKey && meta.stepId === retryStepKey ? retryStepAttempt : 1)
await assertTaskActive(job, `script_to_storyboard_step:${meta.stepId}`)
await assertRunActive(`script_to_storyboard_step:${meta.stepId}`)
const progress = 15 + Math.min(70, Math.floor((meta.stepIndex / Math.max(1, meta.stepTotal)) * 70))
await reportTaskProgress(job, progress, {
stage: 'script_to_storyboard_step',
@@ -177,7 +208,6 @@ export async function handleScriptToStoryboardTask(job: Job<TaskJobData>) {
blockedBy: Array.isArray(meta.blockedBy) ? meta.blockedBy : [],
})
// Log prompt input
logAIAnalysis(job.data.userId, 'worker', projectId, project.name, {
action: `SCRIPT_TO_STORYBOARD_PROMPT:${action}`,
input: { stepId: meta.stepId, stepTitle: meta.stepTitle, prompt },
@@ -198,10 +228,8 @@ export async function handleScriptToStoryboardTask(job: Job<TaskJobData>) {
reasoning,
reasoningEffort,
})
// Ensure this step's stream terminal events are flushed before entering dependent steps.
await callbacks.flush()
// Log AI response output (full raw text included for JSON parse debugging)
logAIAnalysis(job.data.userId, 'worker', projectId, project.name, {
action: `SCRIPT_TO_STORYBOARD_OUTPUT:${action}`,
output: {
@@ -220,405 +248,415 @@ export async function handleScriptToStoryboardTask(job: Job<TaskJobData>) {
}
}
const payloadMeta = typeof payload.meta === 'object' && payload.meta !== null
? (payload.meta as AnyObj)
: {}
const runId = typeof payload.runId === 'string' && payload.runId.trim()
? payload.runId.trim()
: (typeof payloadMeta.runId === 'string' ? payloadMeta.runId.trim() : '')
if (!runId) {
throw new Error('runId is required for script_to_storyboard pipeline')
}
const leaseResult = await withWorkflowRunLease({
runId,
userId: job.data.userId,
workerId,
run: async () => {
await reportTaskProgress(job, 10, {
stage: 'script_to_storyboard_prepare',
stageLabel: 'progress.stage.scriptToStoryboardPrepare',
displayMode: 'detail',
})
const orchestratorResult = await (async () => {
try {
return await withInternalLLMStreamCallbacks(
callbacks,
async () => {
if (retryTarget) {
const clipIndex = clips.findIndex((clip) => clip.id === retryTarget.clipId)
if (clipIndex < 0) {
throw new Error(`Retry clip not found: ${retryTarget.clipId}`)
}
const clip = clips[clipIndex]
const atomicResult = await runScriptToStoryboardAtomicRetry({
runId,
retryTarget,
retryStepAttempt,
clip: {
id: clip.id,
content: clip.content,
characters: clip.characters,
location: clip.location,
screenplay: clip.screenplay,
},
clipIndex,
totalClipCount: clips.length,
novelPromotionData: {
characters: novelData.characters || [],
locations: novelData.locations || [],
},
promptTemplates: {
phase1PlanTemplate,
phase2CinematographyTemplate,
phase2ActingTemplate,
phase3DetailTemplate,
},
runStep,
})
return {
clipPanels: atomicResult.clipPanels,
phase1PanelsByClipId: atomicResult.phase1PanelsByClipId,
phase2CinematographyByClipId: atomicResult.phase2CinematographyByClipId,
phase2ActingByClipId: atomicResult.phase2ActingByClipId,
phase3PanelsByClipId: atomicResult.phase3PanelsByClipId,
summary: {
clipCount: selectedClips.length,
totalPanelCount: atomicResult.totalPanelCount,
totalStepCount: atomicResult.totalStepCount,
},
}
}
const orchestratorResult: ScriptToStoryboardOrchestratorResult = await (async () => {
try {
return await withInternalLLMStreamCallbacks(
callbacks,
async () => {
if (retryTarget) {
const clipIndex = clips.findIndex((clip) => clip.id === retryTarget.clipId)
if (clipIndex < 0) {
throw new Error(`Retry clip not found: ${retryTarget.clipId}`)
}
const clip = clips[clipIndex]
const atomicResult = await runScriptToStoryboardAtomicRetry({
runId,
retryTarget,
retryStepAttempt,
clip: {
id: clip.id,
content: clip.content,
characters: clip.characters,
location: clip.location,
screenplay: clip.screenplay,
},
clipIndex,
totalClipCount: clips.length,
novelPromotionData: {
characters: novelData.characters || [],
locations: novelData.locations || [],
},
promptTemplates: {
phase1PlanTemplate,
phase2CinematographyTemplate,
phase2ActingTemplate,
phase3DetailTemplate,
},
runStep,
})
return {
clipPanels: atomicResult.clipPanels,
phase1PanelsByClipId: atomicResult.phase1PanelsByClipId,
phase2CinematographyByClipId: atomicResult.phase2CinematographyByClipId,
phase2ActingByClipId: atomicResult.phase2ActingByClipId,
phase3PanelsByClipId: atomicResult.phase3PanelsByClipId,
summary: {
clipCount: selectedClips.length,
totalPanelCount: atomicResult.totalPanelCount,
totalStepCount: atomicResult.totalStepCount,
},
}
}
const pipelineState = await runScriptToStoryboardGraph({
try {
return await runScriptToStoryboardOrchestrator({
concurrency: workflowConcurrency.analysis,
clips: selectedClips.map((clip) => ({
id: clip.id,
content: clip.content,
characters: clip.characters,
location: clip.location,
screenplay: clip.screenplay,
})),
novelPromotionData: {
characters: novelData.characters || [],
locations: novelData.locations || [],
},
promptTemplates: {
phase1PlanTemplate,
phase2CinematographyTemplate,
phase2ActingTemplate,
phase3DetailTemplate,
},
runStep,
})
} catch (error) {
if (error instanceof JsonParseError) {
logAIAnalysis(job.data.userId, 'worker', projectId, project.name, {
action: 'SCRIPT_TO_STORYBOARD_PARSE_ERROR',
error: {
message: error.message,
rawTextPreview: error.rawText.slice(0, 3000),
rawTextLength: error.rawText.length,
},
model,
})
}
throw error
}
},
)
} finally {
await callbacks.flush()
}
})()
const phase1Map = orchestratorResult.phase1PanelsByClipId || {}
const phase2CinematographyMap = orchestratorResult.phase2CinematographyByClipId || {}
const phase2ActingMap = orchestratorResult.phase2ActingByClipId || {}
const phase3Map = orchestratorResult.phase3PanelsByClipId || {}
for (const clip of selectedClips) {
const phase1Panels = phase1Map[clip.id] || []
if (phase1Panels.length > 0) {
await createArtifact({
runId,
projectId,
userId: job.data.userId,
concurrency: workflowConcurrency.analysis,
clips: selectedClips.map((clip) => ({
id: clip.id,
content: clip.content,
characters: clip.characters,
location: clip.location,
screenplay: clip.screenplay,
})),
novelPromotionData: {
characters: novelData.characters || [],
locations: novelData.locations || [],
},
promptTemplates: {
phase1PlanTemplate,
phase2CinematographyTemplate,
phase2ActingTemplate,
phase3DetailTemplate,
},
runStep,
onParseError: (err) => {
logAIAnalysis(job.data.userId, 'worker', projectId, project.name, {
action: 'SCRIPT_TO_STORYBOARD_PARSE_ERROR',
error: {
message: err.message,
rawTextPreview: err.rawText.slice(0, 3000),
rawTextLength: err.rawText.length,
},
model,
})
stepKey: `clip_${clip.id}_phase1`,
artifactType: 'storyboard.clip.phase1',
refId: clip.id,
payload: {
panels: phase1Panels,
},
})
const result = pipelineState.orchestratorResult
if (!result) {
throw new Error('script_to_storyboard orchestrator produced no result')
}
return result
},
)
} finally {
await callbacks.flush()
}
})()
}
const phase2Cinematography = phase2CinematographyMap[clip.id] || []
if (phase2Cinematography.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase2_cinematography`,
artifactType: 'storyboard.clip.phase2.cine',
refId: clip.id,
payload: {
rules: phase2Cinematography,
},
})
}
const phase2Acting = phase2ActingMap[clip.id] || []
if (phase2Acting.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase2_acting`,
artifactType: 'storyboard.clip.phase2.acting',
refId: clip.id,
payload: {
directions: phase2Acting,
},
})
}
const phase3Panels = phase3Map[clip.id] || []
if (phase3Panels.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase3_detail`,
artifactType: 'storyboard.clip.phase3',
refId: clip.id,
payload: {
panels: phase3Panels,
},
})
}
}
const phase1Map = orchestratorResult.phase1PanelsByClipId || {}
const phase2CinematographyMap = orchestratorResult.phase2CinematographyByClipId || {}
const phase2ActingMap = orchestratorResult.phase2ActingByClipId || {}
const phase3Map = orchestratorResult.phase3PanelsByClipId || {}
await reportTaskProgress(job, 80, {
stage: 'script_to_storyboard_persist',
stageLabel: 'progress.stage.scriptToStoryboardPersist',
displayMode: 'detail',
})
await assertRunActive('script_to_storyboard_persist')
for (const clip of selectedClips) {
const phase1Panels = phase1Map[clip.id] || []
if (phase1Panels.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase1`,
artifactType: 'storyboard.clip.phase1',
refId: clip.id,
payload: {
panels: phase1Panels,
const persistedStoryboards = await persistStoryboardsAndPanels({
episodeId,
clipPanels: orchestratorResult.clipPanels,
})
if (skipVoiceAnalyze) {
await reportTaskProgress(job, 96, {
stage: 'script_to_storyboard_persist_done',
stageLabel: 'progress.stage.scriptToStoryboardPersistDone',
displayMode: 'detail',
message: 'step retry complete',
stepId: retryStepKey || undefined,
stepAttempt:
typeof payload.retryStepAttempt === 'number' && Number.isFinite(payload.retryStepAttempt)
? Math.max(1, Math.floor(payload.retryStepAttempt))
: undefined,
})
return {
episodeId,
storyboardCount: persistedStoryboards.length,
panelCount: orchestratorResult.summary.totalPanelCount,
voiceLineCount: 0,
retryStepKey,
}
}
if (!episode.novelText || !episode.novelText.trim()) {
throw new Error('No novel text to analyze')
}
const voicePrompt = buildPrompt({
promptId: PROMPT_IDS.NP_VOICE_ANALYSIS,
locale: job.data.locale,
variables: {
input: episode.novelText,
characters_lib_name: (novelData.characters || []).length > 0
? (novelData.characters || []).map((item) => item.name).join('、')
: '无',
characters_introduction: buildCharactersIntroduction(novelData.characters || []),
storyboard_json: buildStoryboardJson(persistedStoryboards),
},
})
}
const phase2Cinematography = phase2CinematographyMap[clip.id] || []
if (phase2Cinematography.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase2_cinematography`,
artifactType: 'storyboard.clip.phase2.cine',
refId: clip.id,
payload: {
rules: phase2Cinematography,
},
})
}
const phase2Acting = phase2ActingMap[clip.id] || []
if (phase2Acting.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase2_acting`,
artifactType: 'storyboard.clip.phase2.acting',
refId: clip.id,
payload: {
directions: phase2Acting,
},
})
}
const phase3Panels = phase3Map[clip.id] || []
if (phase3Panels.length > 0) {
await createArtifact({
runId,
stepKey: `clip_${clip.id}_phase3_detail`,
artifactType: 'storyboard.clip.phase3',
refId: clip.id,
payload: {
panels: phase3Panels,
},
})
}
}
await reportTaskProgress(job, 80, {
stage: 'script_to_storyboard_persist',
stageLabel: 'progress.stage.scriptToStoryboardPersist',
displayMode: 'detail',
})
await assertTaskActive(job, 'script_to_storyboard_persist')
const persistedStoryboards = await persistStoryboardsAndPanels({
episodeId,
clipPanels: orchestratorResult.clipPanels,
})
if (skipVoiceAnalyze) {
await reportTaskProgress(job, 96, {
stage: 'script_to_storyboard_persist_done',
stageLabel: 'progress.stage.scriptToStoryboardPersistDone',
displayMode: 'detail',
message: 'step retry complete',
stepId: retryStepKey || undefined,
stepAttempt:
typeof payload.retryStepAttempt === 'number' && Number.isFinite(payload.retryStepAttempt)
? Math.max(1, Math.floor(payload.retryStepAttempt))
: undefined,
})
return {
episodeId,
storyboardCount: persistedStoryboards.length,
panelCount: orchestratorResult.summary.totalPanelCount,
voiceLineCount: 0,
retryStepKey,
}
}
if (!episode.novelText || !episode.novelText.trim()) {
throw new Error('No novel text to analyze')
}
const voicePrompt = buildPrompt({
promptId: PROMPT_IDS.NP_VOICE_ANALYSIS,
locale: job.data.locale,
variables: {
input: episode.novelText,
characters_lib_name: (novelData.characters || []).length > 0
? (novelData.characters || []).map((item) => item.name).join('、')
: '无',
characters_introduction: buildCharactersIntroduction(novelData.characters || []),
storyboard_json: buildStoryboardJson(persistedStoryboards),
},
})
let voiceLineRows: JsonRecord[] | null = null
let voiceLastError: Error | null = null
const voiceStepMeta: ScriptToStoryboardStepMeta = {
stepId: 'voice_analyze',
stepTitle: 'progress.streamStep.voiceAnalyze',
stepIndex: orchestratorResult.summary.totalStepCount,
stepTotal: orchestratorResult.summary.totalStepCount,
retryable: true,
}
try {
for (let voiceAttempt = 1; voiceAttempt <= MAX_VOICE_ANALYZE_ATTEMPTS; voiceAttempt++) {
const meta: ScriptToStoryboardStepMeta = {
...voiceStepMeta,
stepAttempt: voiceAttempt,
let voiceLineRows: JsonRecord[] | null = null
let voiceLastError: Error | null = null
const voiceStepMeta: ScriptToStoryboardStepMeta = {
stepId: 'voice_analyze',
stepTitle: 'progress.streamStep.voiceAnalyze',
stepIndex: orchestratorResult.summary.totalStepCount,
stepTotal: orchestratorResult.summary.totalStepCount,
retryable: true,
}
try {
const voiceOutput = await withInternalLLMStreamCallbacks(
callbacks,
async () => await runStep(meta, voicePrompt, 'voice_analyze', 2600),
)
voiceLineRows = parseVoiceLinesJson(voiceOutput.text)
break
} catch (error) {
if (error instanceof TaskTerminatedError) {
throw error
for (let voiceAttempt = 1; voiceAttempt <= MAX_VOICE_ANALYZE_ATTEMPTS; voiceAttempt++) {
const meta: ScriptToStoryboardStepMeta = {
...voiceStepMeta,
stepAttempt: voiceAttempt,
}
try {
const voiceOutput = await withInternalLLMStreamCallbacks(
callbacks,
async () => await runStep(meta, voicePrompt, 'voice_analyze', 2600),
)
voiceLineRows = parseVoiceLinesJson(voiceOutput.text)
break
} catch (error) {
if (error instanceof TaskTerminatedError) {
throw error
}
voiceLastError = error instanceof Error ? error : new Error(String(error))
if (voiceAttempt < MAX_VOICE_ANALYZE_ATTEMPTS) {
await reportTaskProgress(job, 84, {
stage: 'script_to_storyboard_step',
stageLabel: 'progress.stage.scriptToStoryboardStep',
displayMode: 'detail',
message: `台词分析失败,准备重试 (${voiceAttempt + 1}/${MAX_VOICE_ANALYZE_ATTEMPTS})`,
stepId: voiceStepMeta.stepId,
stepAttempt: voiceAttempt + 1,
stepTitle: voiceStepMeta.stepTitle,
stepIndex: voiceStepMeta.stepIndex,
stepTotal: voiceStepMeta.stepTotal,
})
}
}
}
voiceLastError = error instanceof Error ? error : new Error(String(error))
if (voiceAttempt < MAX_VOICE_ANALYZE_ATTEMPTS) {
await reportTaskProgress(job, 84, {
stage: 'script_to_storyboard_step',
stageLabel: 'progress.stage.scriptToStoryboardStep',
displayMode: 'detail',
message: `台词分析失败,准备重试 (${voiceAttempt + 1}/${MAX_VOICE_ANALYZE_ATTEMPTS})`,
stepId: voiceStepMeta.stepId,
stepAttempt: voiceAttempt + 1,
stepTitle: voiceStepMeta.stepTitle,
stepIndex: voiceStepMeta.stepIndex,
stepTotal: voiceStepMeta.stepTotal,
})
} finally {
await callbacks.flush()
}
if (!voiceLineRows) {
throw voiceLastError!
}
await createArtifact({
runId,
stepKey: 'voice_analyze',
artifactType: 'voice.lines',
refId: episodeId,
payload: {
lines: voiceLineRows,
},
})
await assertRunActive('script_to_storyboard_voice_persist')
const panelIdByStoryboardPanel = new Map<string, string>()
for (const storyboard of persistedStoryboards) {
for (const panel of storyboard.panels) {
panelIdByStoryboardPanel.set(`${storyboard.storyboardId}:${panel.panelIndex}`, panel.id)
}
}
}
} finally {
await callbacks.flush()
}
if (!voiceLineRows) {
throw voiceLastError!
}
await createArtifact({
runId,
stepKey: 'voice_analyze',
artifactType: 'voice.lines',
refId: episodeId,
payload: {
lines: voiceLineRows,
const createdVoiceLines = await prisma.$transaction(async (tx) => {
const voiceLineModel = tx.novelPromotionVoiceLine as unknown as {
upsert?: (args: unknown) => Promise<{ id: string }>
create: (args: unknown) => Promise<{ id: string }>
deleteMany: (args: unknown) => Promise<unknown>
}
const created: Array<{ id: string }> = []
for (let i = 0; i < voiceLineRows.length; i += 1) {
const row = voiceLineRows[i] || {}
const matchedPanel = asJsonRecord(row.matchedPanel)
const matchedStoryboardId =
matchedPanel && typeof matchedPanel.storyboardId === 'string'
? matchedPanel.storyboardId.trim()
: null
const matchedPanelIndex = matchedPanel ? toPositiveInt(matchedPanel.panelIndex) : null
let matchedPanelId: string | null = null
if (matchedPanel !== null) {
if (!matchedStoryboardId || matchedPanelIndex === null) {
throw new Error(`voice line ${i + 1} has invalid matchedPanel reference`)
}
const panelKey = `${matchedStoryboardId}:${matchedPanelIndex}`
const resolvedPanelId = panelIdByStoryboardPanel.get(panelKey)
if (!resolvedPanelId) {
throw new Error(`voice line ${i + 1} references non-existent panel ${panelKey}`)
}
matchedPanelId = resolvedPanelId
}
if (typeof row.emotionStrength !== 'number' || !Number.isFinite(row.emotionStrength)) {
throw new Error(`voice line ${i + 1} is missing valid emotionStrength`)
}
const emotionStrength = Math.min(1, Math.max(0.1, row.emotionStrength))
if (typeof row.lineIndex !== 'number' || !Number.isFinite(row.lineIndex)) {
throw new Error(`voice line ${i + 1} is missing valid lineIndex`)
}
const lineIndex = Math.floor(row.lineIndex)
if (lineIndex <= 0) {
throw new Error(`voice line ${i + 1} has invalid lineIndex`)
}
if (typeof row.speaker !== 'string' || !row.speaker.trim()) {
throw new Error(`voice line ${i + 1} is missing valid speaker`)
}
if (typeof row.content !== 'string' || !row.content.trim()) {
throw new Error(`voice line ${i + 1} is missing valid content`)
}
const upsertArgs = {
where: {
episodeId_lineIndex: {
episodeId,
lineIndex,
},
},
create: {
episodeId,
lineIndex,
speaker: row.speaker.trim(),
content: row.content,
emotionStrength,
matchedPanelId,
matchedStoryboardId: matchedPanelId ? matchedStoryboardId : null,
matchedPanelIndex,
},
update: {
speaker: row.speaker.trim(),
content: row.content,
emotionStrength,
matchedPanelId,
matchedStoryboardId: matchedPanelId ? matchedStoryboardId : null,
matchedPanelIndex,
},
select: { id: true },
}
const createdRow = typeof voiceLineModel.upsert === 'function'
? await voiceLineModel.upsert(upsertArgs)
: (
process.env.NODE_ENV === 'test'
? await voiceLineModel.create({
data: upsertArgs.create,
select: { id: true },
})
: (() => { throw new Error('novelPromotionVoiceLine.upsert unavailable') })()
)
created.push(createdRow)
}
const nextLineIndexes = voiceLineRows
.map((row) => (typeof row.lineIndex === 'number' && Number.isFinite(row.lineIndex) ? Math.floor(row.lineIndex) : -1))
.filter((value) => value > 0)
if (nextLineIndexes.length === 0) {
await voiceLineModel.deleteMany({
where: {
episodeId,
},
})
} else {
await voiceLineModel.deleteMany({
where: {
episodeId,
lineIndex: {
notIn: nextLineIndexes,
},
},
})
}
return created
}, { timeout: 15000 })
await reportTaskProgress(job, 96, {
stage: 'script_to_storyboard_persist_done',
stageLabel: 'progress.stage.scriptToStoryboardPersistDone',
displayMode: 'detail',
})
return {
episodeId,
storyboardCount: persistedStoryboards.length,
panelCount: orchestratorResult.summary.totalPanelCount,
voiceLineCount: createdVoiceLines.length,
}
},
})
await assertTaskActive(job, 'script_to_storyboard_voice_persist')
const panelIdByStoryboardPanel = new Map<string, string>()
for (const storyboard of persistedStoryboards) {
for (const panel of storyboard.panels) {
panelIdByStoryboardPanel.set(`${storyboard.storyboardId}:${panel.panelIndex}`, panel.id)
if (!leaseResult.claimed || !leaseResult.result) {
return {
runId,
skipped: true,
episodeId,
}
}
const createdVoiceLines = await prisma.$transaction(async (tx) => {
const voiceLineModel = tx.novelPromotionVoiceLine as unknown as {
upsert?: (args: unknown) => Promise<{ id: string }>
create: (args: unknown) => Promise<{ id: string }>
deleteMany: (args: unknown) => Promise<unknown>
}
const created: Array<{ id: string }> = []
for (let i = 0; i < voiceLineRows.length; i += 1) {
const row = voiceLineRows[i] || {}
const matchedPanel = asJsonRecord(row.matchedPanel)
const matchedStoryboardId =
matchedPanel && typeof matchedPanel.storyboardId === 'string'
? matchedPanel.storyboardId.trim()
: null
const matchedPanelIndex = matchedPanel ? toPositiveInt(matchedPanel.panelIndex) : null
let matchedPanelId: string | null = null
if (matchedPanel !== null) {
if (!matchedStoryboardId || matchedPanelIndex === null) {
throw new Error(`voice line ${i + 1} has invalid matchedPanel reference`)
}
const panelKey = `${matchedStoryboardId}:${matchedPanelIndex}`
const resolvedPanelId = panelIdByStoryboardPanel.get(panelKey)
if (!resolvedPanelId) {
throw new Error(`voice line ${i + 1} references non-existent panel ${panelKey}`)
}
matchedPanelId = resolvedPanelId
}
if (typeof row.emotionStrength !== 'number' || !Number.isFinite(row.emotionStrength)) {
throw new Error(`voice line ${i + 1} is missing valid emotionStrength`)
}
const emotionStrength = Math.min(1, Math.max(0.1, row.emotionStrength))
if (typeof row.lineIndex !== 'number' || !Number.isFinite(row.lineIndex)) {
throw new Error(`voice line ${i + 1} is missing valid lineIndex`)
}
const lineIndex = Math.floor(row.lineIndex)
if (lineIndex <= 0) {
throw new Error(`voice line ${i + 1} has invalid lineIndex`)
}
if (typeof row.speaker !== 'string' || !row.speaker.trim()) {
throw new Error(`voice line ${i + 1} is missing valid speaker`)
}
if (typeof row.content !== 'string' || !row.content.trim()) {
throw new Error(`voice line ${i + 1} is missing valid content`)
}
const upsertArgs = {
where: {
episodeId_lineIndex: {
episodeId,
lineIndex,
},
},
create: {
episodeId,
lineIndex,
speaker: row.speaker.trim(),
content: row.content,
emotionStrength,
matchedPanelId,
matchedStoryboardId: matchedPanelId ? matchedStoryboardId : null,
matchedPanelIndex,
},
update: {
speaker: row.speaker.trim(),
content: row.content,
emotionStrength,
matchedPanelId,
matchedStoryboardId: matchedPanelId ? matchedStoryboardId : null,
matchedPanelIndex,
},
select: { id: true },
}
const createdRow = typeof voiceLineModel.upsert === 'function'
? await voiceLineModel.upsert(upsertArgs)
: (
process.env.NODE_ENV === 'test'
? await voiceLineModel.create({
data: upsertArgs.create,
select: { id: true },
})
: (() => { throw new Error('novelPromotionVoiceLine.upsert unavailable') })()
)
created.push(createdRow)
}
const nextLineIndexes = voiceLineRows
.map((row) => (typeof row.lineIndex === 'number' && Number.isFinite(row.lineIndex) ? Math.floor(row.lineIndex) : -1))
.filter((value) => value > 0)
if (nextLineIndexes.length === 0) {
await voiceLineModel.deleteMany({
where: {
episodeId,
},
})
} else {
await voiceLineModel.deleteMany({
where: {
episodeId,
lineIndex: {
notIn: nextLineIndexes,
},
},
})
}
return created
}, { timeout: 15000 })
await reportTaskProgress(job, 96, {
stage: 'script_to_storyboard_persist_done',
stageLabel: 'progress.stage.scriptToStoryboardPersistDone',
displayMode: 'detail',
})
return {
episodeId,
storyboardCount: persistedStoryboards.length,
panelCount: orchestratorResult.summary.totalPanelCount,
voiceLineCount: createdVoiceLines.length,
}
return leaseResult.result
}

View File

@@ -8,14 +8,14 @@ import {
import { withInternalLLMStreamCallbacks } from '@/lib/llm-observe/internal-stream-context'
import { logAIAnalysis } from '@/lib/logging/semantic'
import { onProjectNameAvailable } from '@/lib/logging/file-writer'
import { TaskTerminatedError } from '@/lib/task/errors'
import { reportTaskProgress } from '@/lib/workers/shared'
import { assertTaskActive } from '@/lib/workers/utils'
import {
runStoryToScriptOrchestrator,
type StoryToScriptStepMeta,
type StoryToScriptStepOutput,
type StoryToScriptOrchestratorResult,
} from '@/lib/novel-promotion/story-to-script/orchestrator'
import { runStoryToScriptGraph } from '@/lib/workflows/story-to-script/graph'
import { createWorkerLLMStreamCallbacks, createWorkerLLMStreamContext } from './llm-stream'
import type { TaskJobData } from '@/lib/task/types'
import {
@@ -31,6 +31,7 @@ import {
import { getPromptTemplate, PROMPT_IDS } from '@/lib/prompt-i18n'
import { resolveAnalysisModel } from './resolve-analysis-model'
import { createArtifact, listArtifacts } from '@/lib/run-runtime/service'
import { assertWorkflowRunActive, withWorkflowRunLease } from '@/lib/run-runtime/workflow-lease'
import { parseScreenplayPayload } from './screenplay-convert-helpers'
function isReasoningEffort(value: unknown): value is 'minimal' | 'low' | 'medium' | 'high' {
@@ -43,6 +44,10 @@ function resolveRetryClipId(retryStepKey: string): string | null {
return clipId || null
}
/**
 * Stable worker identity for workflow-run leasing, formatted as
 * "<label>:<queueName>:<taskId>". Passed as `workerId` to
 * assertWorkflowRunActive / withWorkflowRunLease.
 */
function buildWorkflowWorkerId(job: Job<TaskJobData>, label: string) {
  return `${label}:${job.queueName}:${job.data.taskId}`
}
export async function handleStoryToScriptTask(job: Job<TaskJobData>) {
const payload = (job.data.payload || {}) as AnyObj
const projectId = job.data.projectId
@@ -125,22 +130,50 @@ export async function handleStoryToScriptTask(job: Job<TaskJobData>) {
if (!mergedContent.trim()) {
throw new Error('content is required')
}
const maxLength = 30000
const content = mergedContent.length > maxLength ? mergedContent.slice(0, maxLength) : mergedContent
await reportTaskProgress(job, 10, {
stage: 'story_to_script_prepare',
stageLabel: 'progress.stage.storyToScriptPrepare',
displayMode: 'detail',
})
const characterPromptTemplate = getPromptTemplate(PROMPT_IDS.NP_AGENT_CHARACTER_PROFILE, job.data.locale)
const locationPromptTemplate = getPromptTemplate(PROMPT_IDS.NP_SELECT_LOCATION, job.data.locale)
const clipPromptTemplate = getPromptTemplate(PROMPT_IDS.NP_AGENT_CLIP, job.data.locale)
const screenplayPromptTemplate = getPromptTemplate(PROMPT_IDS.NP_SCREENPLAY_CONVERSION, job.data.locale)
const maxLength = 30000
const content = mergedContent.length > maxLength ? mergedContent.slice(0, maxLength) : mergedContent
const payloadMeta = typeof payload.meta === 'object' && payload.meta !== null
? (payload.meta as AnyObj)
: {}
const runId = typeof payload.runId === 'string' && payload.runId.trim()
? payload.runId.trim()
: (typeof payloadMeta.runId === 'string' ? payloadMeta.runId.trim() : '')
if (!runId) {
throw new Error('runId is required for story_to_script pipeline')
}
const retryClipId = resolveRetryClipId(retryStepKey)
if (retryStepKey && !retryClipId) {
throw new Error(`unsupported retry step for story_to_script: ${retryStepKey}`)
}
const workerId = buildWorkflowWorkerId(job, 'story_to_script')
const assertRunActive = async (stage: string) => {
await assertWorkflowRunActive({
runId,
workerId,
stage,
})
}
const streamContext = createWorkerLLMStreamContext(job, 'story_to_script')
const callbacks = createWorkerLLMStreamCallbacks(job, streamContext)
const callbacks = createWorkerLLMStreamCallbacks(job, streamContext, {
assertActive: async (stage) => {
await assertRunActive(stage)
},
isActive: async () => {
try {
await assertRunActive('worker_llm_stream_probe')
return true
} catch (error) {
if (error instanceof TaskTerminatedError) {
return false
}
throw error
}
},
})
const runStep = async (
meta: StoryToScriptStepMeta,
@@ -151,7 +184,7 @@ export async function handleStoryToScriptTask(job: Job<TaskJobData>) {
void _maxOutputTokens
const stepAttempt = meta.stepAttempt
|| (retryStepKey && meta.stepId === retryStepKey ? retryStepAttempt : 1)
await assertTaskActive(job, `story_to_script_step:${meta.stepId}`)
await assertRunActive(`story_to_script_step:${meta.stepId}`)
const progress = 15 + Math.min(55, Math.floor((meta.stepIndex / Math.max(1, meta.stepTotal)) * 55))
await reportTaskProgress(job, progress, {
stage: 'story_to_script_step',
@@ -170,7 +203,6 @@ export async function handleStoryToScriptTask(job: Job<TaskJobData>) {
blockedBy: Array.isArray(meta.blockedBy) ? meta.blockedBy : [],
})
// Log prompt input
logAIAnalysis(job.data.userId, 'worker', projectId, project.name, {
action: `STORY_TO_SCRIPT_PROMPT:${action}`,
input: { stepId: meta.stepId, stepTitle: meta.stepTitle, prompt },
@@ -191,10 +223,8 @@ export async function handleStoryToScriptTask(job: Job<TaskJobData>) {
reasoning,
reasoningEffort,
})
// Ensure this step's stream terminal events are flushed before the orchestrator moves on.
await callbacks.flush()
// Log AI response output (full raw text included for debugging)
logAIAnalysis(job.data.userId, 'worker', projectId, project.name, {
action: `STORY_TO_SCRIPT_OUTPUT:${action}`,
output: {
@@ -213,319 +243,315 @@ export async function handleStoryToScriptTask(job: Job<TaskJobData>) {
}
}
let result: StoryToScriptOrchestratorResult | null = null
const payloadMeta = typeof payload.meta === 'object' && payload.meta !== null
? (payload.meta as AnyObj)
: {}
const runId = typeof payload.runId === 'string' && payload.runId.trim()
? payload.runId.trim()
: (typeof payloadMeta.runId === 'string' ? payloadMeta.runId.trim() : '')
if (!runId) {
throw new Error('runId is required for story_to_script pipeline')
}
const retryClipId = resolveRetryClipId(retryStepKey)
if (retryStepKey && !retryClipId) {
throw new Error(`unsupported retry step for story_to_script: ${retryStepKey}`)
}
const leaseResult = await withWorkflowRunLease({
runId,
userId: job.data.userId,
workerId,
run: async () => {
await reportTaskProgress(job, 10, {
stage: 'story_to_script_prepare',
stageLabel: 'progress.stage.storyToScriptPrepare',
displayMode: 'detail',
})
if (retryClipId) {
const splitArtifacts = await listArtifacts({
runId,
artifactType: 'clips.split',
limit: 1,
})
const latestSplit = splitArtifacts[0]
const splitPayload = latestSplit && typeof latestSplit.payload === 'object' && latestSplit.payload !== null
? (latestSplit.payload as Record<string, unknown>)
: null
if (!splitPayload) {
throw new Error('missing clips.split artifact for retry')
}
if (retryClipId) {
const splitArtifacts = await listArtifacts({
runId,
artifactType: 'clips.split',
limit: 1,
})
const latestSplit = splitArtifacts[0]
const splitPayload = latestSplit && typeof latestSplit.payload === 'object' && latestSplit.payload !== null
? (latestSplit.payload as Record<string, unknown>)
: null
if (!splitPayload) {
throw new Error('missing clips.split artifact for retry')
}
const clipRows = Array.isArray(splitPayload.clipList) ? splitPayload.clipList : []
const retryClip = clipRows.find((item) => {
if (!item || typeof item !== 'object' || Array.isArray(item)) return false
return asString((item as Record<string, unknown>).id).trim() === retryClipId
}) as Record<string, unknown> | undefined
if (!retryClip) {
throw new Error(`retry clip not found in artifact: ${retryClipId}`)
}
const clipRows = Array.isArray(splitPayload.clipList) ? splitPayload.clipList : []
const retryClip = clipRows.find((item) => {
if (!item || typeof item !== 'object' || Array.isArray(item)) return false
return asString((item as Record<string, unknown>).id).trim() === retryClipId
}) as Record<string, unknown> | undefined
if (!retryClip) {
throw new Error(`retry clip not found in artifact: ${retryClipId}`)
}
const clipContent = asString(retryClip.content)
if (!clipContent.trim()) {
throw new Error(`retry clip content is empty: ${retryClipId}`)
}
const clipContent = asString(retryClip.content)
if (!clipContent.trim()) {
throw new Error(`retry clip content is empty: ${retryClipId}`)
}
const screenplayPrompt = screenplayPromptTemplate
.replace('{clip_content}', clipContent)
.replace('{locations_lib_name}', asString(splitPayload.locationsLibName) || '无')
.replace('{characters_lib_name}', asString(splitPayload.charactersLibName) || '无')
.replace('{characters_introduction}', asString(splitPayload.charactersIntroduction) || '暂无角色介绍')
.replace('{clip_id}', retryClipId)
const screenplayPrompt = screenplayPromptTemplate
.replace('{clip_content}', clipContent)
.replace('{locations_lib_name}', asString(splitPayload.locationsLibName) || '无')
.replace('{characters_lib_name}', asString(splitPayload.charactersLibName) || '无')
.replace('{characters_introduction}', asString(splitPayload.charactersIntroduction) || '暂无角色介绍')
.replace('{clip_id}', retryClipId)
const stepMeta: StoryToScriptStepMeta = {
stepId: retryStepKey,
stepAttempt: retryStepAttempt,
stepTitle: 'progress.streamStep.screenplayConversion',
stepIndex: 1,
stepTotal: 1,
dependsOn: ['split_clips'],
retryable: true,
}
let screenplay: AnyObj | null = null
try {
const stepOutput = await (async () => {
const stepMeta: StoryToScriptStepMeta = {
stepId: retryStepKey,
stepAttempt: retryStepAttempt,
stepTitle: 'progress.streamStep.screenplayConversion',
stepIndex: 1,
stepTotal: 1,
dependsOn: ['split_clips'],
retryable: true,
}
let screenplay: AnyObj | null = null
try {
const stepOutput = await (async () => {
try {
return await withInternalLLMStreamCallbacks(
callbacks,
async () => await runStep(stepMeta, screenplayPrompt, 'screenplay_conversion', 2200),
)
} finally {
await callbacks.flush()
}
})()
screenplay = parseScreenplayPayload(stepOutput.text)
} catch (error) {
await createArtifact({
runId,
stepKey: retryStepKey,
artifactType: 'screenplay.clip',
refId: retryClipId,
payload: {
clipId: retryClipId,
success: false,
error: error instanceof Error ? error.message : String(error),
},
})
throw error
}
if (!screenplay) {
throw new Error('retry screenplay output is empty')
}
await createArtifact({
runId,
stepKey: retryStepKey,
artifactType: 'screenplay.clip',
refId: retryClipId,
payload: {
clipId: retryClipId,
success: true,
sceneCount: Array.isArray(screenplay.scenes) ? screenplay.scenes.length : 0,
screenplay,
},
})
let clipRecord = await prisma.novelPromotionClip.findFirst({
where: {
episodeId,
startText: asString(retryClip.startText) || null,
endText: asString(retryClip.endText) || null,
},
select: { id: true },
})
if (!clipRecord) {
clipRecord = await prisma.novelPromotionClip.create({
data: {
episodeId,
startText: asString(retryClip.startText) || null,
endText: asString(retryClip.endText) || null,
summary: asString(retryClip.summary),
location: asString(retryClip.location) || null,
characters: Array.isArray(retryClip.characters) ? JSON.stringify(retryClip.characters) : null,
content: clipContent,
},
select: { id: true },
})
}
await prisma.novelPromotionClip.update({
where: { id: clipRecord.id },
data: {
screenplay: JSON.stringify(screenplay),
},
})
await reportTaskProgress(job, 96, {
stage: 'story_to_script_persist_done',
stageLabel: 'progress.stage.storyToScriptPersistDone',
displayMode: 'detail',
message: 'retry step completed',
stepId: retryStepKey,
stepAttempt: retryStepAttempt,
stepTitle: 'progress.streamStep.screenplayConversion',
stepIndex: 1,
stepTotal: 1,
})
return {
episodeId,
clipCount: 1,
screenplaySuccessCount: 1,
screenplayFailedCount: 0,
persistedCharacters: 0,
persistedLocations: 0,
persistedClips: 1,
retryStepKey,
}
}
const result: StoryToScriptOrchestratorResult = await (async () => {
try {
return await withInternalLLMStreamCallbacks(
callbacks,
async () => await runStep(stepMeta, screenplayPrompt, 'screenplay_conversion', 2200),
async () => await runStoryToScriptOrchestrator({
concurrency: workflowConcurrency.analysis,
content,
baseCharacters: (novelData.characters || []).map((item) => item.name),
baseLocations: (novelData.locations || []).map((item) => item.name),
baseCharacterIntroductions: (novelData.characters || []).map((item) => ({
name: item.name,
introduction: item.introduction || '',
})),
promptTemplates: {
characterPromptTemplate,
locationPromptTemplate,
clipPromptTemplate,
screenplayPromptTemplate,
},
runStep,
}),
)
} finally {
await callbacks.flush()
}
})()
screenplay = parseScreenplayPayload(stepOutput.text)
} catch (error) {
await createArtifact({
runId,
stepKey: retryStepKey,
artifactType: 'screenplay.clip',
refId: retryClipId,
stepKey: 'analyze_characters',
artifactType: 'analysis.characters',
refId: episodeId,
payload: {
clipId: retryClipId,
success: false,
error: error instanceof Error ? error.message : String(error),
characters: result.analyzedCharacters,
raw: result.charactersObject,
},
})
throw error
}
if (!screenplay) {
throw new Error('retry screenplay output is empty')
}
await createArtifact({
runId,
stepKey: retryStepKey,
artifactType: 'screenplay.clip',
refId: retryClipId,
payload: {
clipId: retryClipId,
success: true,
sceneCount: Array.isArray(screenplay.scenes) ? screenplay.scenes.length : 0,
screenplay,
},
})
let clipRecord = await prisma.novelPromotionClip.findFirst({
where: {
episodeId,
startText: asString(retryClip.startText) || null,
endText: asString(retryClip.endText) || null,
},
select: { id: true },
})
if (!clipRecord) {
clipRecord = await prisma.novelPromotionClip.create({
data: {
episodeId,
startText: asString(retryClip.startText) || null,
endText: asString(retryClip.endText) || null,
summary: asString(retryClip.summary),
location: asString(retryClip.location) || null,
characters: Array.isArray(retryClip.characters) ? JSON.stringify(retryClip.characters) : null,
content: clipContent,
await createArtifact({
runId,
stepKey: 'analyze_locations',
artifactType: 'analysis.locations',
refId: episodeId,
payload: {
locations: result.analyzedLocations,
raw: result.locationsObject,
},
})
await createArtifact({
runId,
stepKey: 'split_clips',
artifactType: 'clips.split',
refId: episodeId,
payload: {
clipList: result.clipList,
charactersLibName: result.charactersLibName,
locationsLibName: result.locationsLibName,
charactersIntroduction: result.charactersIntroduction,
},
})
for (const screenplayResult of result.screenplayResults) {
await createArtifact({
runId,
stepKey: `screenplay_${screenplayResult.clipId}`,
artifactType: 'screenplay.clip',
refId: screenplayResult.clipId,
payload: {
...screenplayResult,
},
})
}
if (result.summary.screenplayFailedCount > 0) {
const failed = result.screenplayResults.filter((item) => !item.success)
const preview = failed
.slice(0, 3)
.map((item) => `${item.clipId}:${item.error || 'unknown error'}`)
.join(' | ')
throw new Error(
`STORY_TO_SCRIPT_PARTIAL_FAILED: ${result.summary.screenplayFailedCount}/${result.summary.clipCount} screenplay steps failed. ${preview}`,
)
}
await reportTaskProgress(job, 80, {
stage: 'story_to_script_persist',
stageLabel: 'progress.stage.storyToScriptPersist',
displayMode: 'detail',
})
await assertRunActive('story_to_script_persist')
const episodeStillExists = await prisma.novelPromotionEpisode.findUnique({
where: { id: episodeId },
select: { id: true },
})
}
await prisma.novelPromotionClip.update({
where: { id: clipRecord.id },
data: {
screenplay: JSON.stringify(screenplay),
},
})
if (!episodeStillExists) {
throw new Error(`NOT_FOUND: Episode ${episodeId} was deleted while the task was running`)
}
await reportTaskProgress(job, 96, {
stage: 'story_to_script_persist_done',
stageLabel: 'progress.stage.storyToScriptPersistDone',
displayMode: 'detail',
message: 'retry step completed',
stepId: retryStepKey,
stepAttempt: retryStepAttempt,
stepTitle: 'progress.streamStep.screenplayConversion',
stepIndex: 1,
stepTotal: 1,
})
return {
episodeId,
clipCount: 1,
screenplaySuccessCount: 1,
screenplayFailedCount: 0,
persistedCharacters: 0,
persistedLocations: 0,
persistedClips: 1,
retryStepKey,
}
}
const pipelineState = await (async () => {
try {
return await withInternalLLMStreamCallbacks(
callbacks,
async () => await runStoryToScriptGraph({
runId,
projectId,
userId: job.data.userId,
concurrency: workflowConcurrency.analysis,
content,
baseCharacters: (novelData.characters || []).map((item) => item.name),
baseLocations: (novelData.locations || []).map((item) => item.name),
baseCharacterIntroductions: (novelData.characters || []).map((item) => ({
name: item.name,
introduction: item.introduction || '',
})),
promptTemplates: {
characterPromptTemplate,
locationPromptTemplate,
clipPromptTemplate,
screenplayPromptTemplate,
},
runStep,
}),
const existingCharacterNames = new Set<string>(
(novelData.characters || []).map((item) => String(item.name || '').toLowerCase()),
)
const existingLocationNames = new Set<string>(
(novelData.locations || []).map((item) => String(item.name || '').toLowerCase()),
)
} finally {
await callbacks.flush()
}
})()
result = pipelineState.orchestratorResult
if (!result) {
throw new Error('story_to_script orchestrator produced no result')
}
const createdCharacters = await persistAnalyzedCharacters({
projectInternalId: novelData.id,
existingNames: existingCharacterNames,
analyzedCharacters: result.analyzedCharacters,
})
await createArtifact({
runId,
stepKey: 'analyze_characters',
artifactType: 'analysis.characters',
refId: episodeId,
payload: {
characters: result.analyzedCharacters,
raw: result.charactersObject,
const createdLocations = await persistAnalyzedLocations({
projectInternalId: novelData.id,
existingNames: existingLocationNames,
analyzedLocations: result.analyzedLocations,
})
const createdClipRows = await persistClips({
episodeId,
clipList: result.clipList,
})
const clipIdMap = new Map(createdClipRows.map((item) => [item.clipKey, item.id]))
for (const screenplayResult of result.screenplayResults) {
if (!screenplayResult.success || !screenplayResult.screenplay) continue
const clipRecordId = resolveClipRecordId(clipIdMap, screenplayResult.clipId)
if (!clipRecordId) continue
await prisma.novelPromotionClip.update({
where: { id: clipRecordId },
data: {
screenplay: JSON.stringify(screenplayResult.screenplay),
},
})
}
await reportTaskProgress(job, 96, {
stage: 'story_to_script_persist_done',
stageLabel: 'progress.stage.storyToScriptPersistDone',
displayMode: 'detail',
})
return {
episodeId,
clipCount: result.summary.clipCount,
screenplaySuccessCount: result.summary.screenplaySuccessCount,
screenplayFailedCount: result.summary.screenplayFailedCount,
persistedCharacters: createdCharacters.length,
persistedLocations: createdLocations.length,
persistedClips: createdClipRows.length,
}
},
})
await createArtifact({
runId,
stepKey: 'analyze_locations',
artifactType: 'analysis.locations',
refId: episodeId,
payload: {
locations: result.analyzedLocations,
raw: result.locationsObject,
},
})
await createArtifact({
runId,
stepKey: 'split_clips',
artifactType: 'clips.split',
refId: episodeId,
payload: {
clipList: result.clipList,
charactersLibName: result.charactersLibName,
locationsLibName: result.locationsLibName,
charactersIntroduction: result.charactersIntroduction,
},
})
for (const screenplayResult of result.screenplayResults) {
await createArtifact({
if (!leaseResult.claimed || !leaseResult.result) {
return {
runId,
stepKey: `screenplay_${screenplayResult.clipId}`,
artifactType: 'screenplay.clip',
refId: screenplayResult.clipId,
payload: {
...screenplayResult,
},
})
}
if (result.summary.screenplayFailedCount > 0) {
const failed = result.screenplayResults.filter((item) => !item.success)
const preview = failed
.slice(0, 3)
.map((item) => `${item.clipId}:${item.error || 'unknown error'}`)
.join(' | ')
throw new Error(
`STORY_TO_SCRIPT_PARTIAL_FAILED: ${result.summary.screenplayFailedCount}/${result.summary.clipCount} screenplay steps failed. ${preview}`,
)
}
await reportTaskProgress(job, 80, {
stage: 'story_to_script_persist',
stageLabel: 'progress.stage.storyToScriptPersist',
displayMode: 'detail',
})
await assertTaskActive(job, 'story_to_script_persist')
// Re-verify episode still exists before persisting — it may have been deleted
// while AI steps were running, which would cause a Prisma foreign key error.
const episodeStillExists = await prisma.novelPromotionEpisode.findUnique({
where: { id: episodeId },
select: { id: true },
})
if (!episodeStillExists) {
throw new Error(`NOT_FOUND: Episode ${episodeId} was deleted while the task was running`)
}
const existingCharacterNames = new Set<string>(
(novelData.characters || []).map((item) => String(item.name || '').toLowerCase()),
)
const existingLocationNames = new Set<string>(
(novelData.locations || []).map((item) => String(item.name || '').toLowerCase()),
)
const createdCharacters = await persistAnalyzedCharacters({
projectInternalId: novelData.id,
existingNames: existingCharacterNames,
analyzedCharacters: result.analyzedCharacters,
})
const createdLocations = await persistAnalyzedLocations({
projectInternalId: novelData.id,
existingNames: existingLocationNames,
analyzedLocations: result.analyzedLocations,
})
const createdClipRows = await persistClips({
episodeId,
clipList: result.clipList,
})
const clipIdMap = new Map(createdClipRows.map((item) => [item.clipKey, item.id]))
for (const screenplayResult of result.screenplayResults) {
if (!screenplayResult.success || !screenplayResult.screenplay) continue
const clipRecordId = resolveClipRecordId(clipIdMap, screenplayResult.clipId)
if (!clipRecordId) continue
await prisma.novelPromotionClip.update({
where: { id: clipRecordId },
data: {
screenplay: JSON.stringify(screenplayResult.screenplay),
},
})
}
await reportTaskProgress(job, 96, {
stage: 'story_to_script_persist_done',
stageLabel: 'progress.stage.storyToScriptPersistDone',
displayMode: 'detail',
})
return {
episodeId,
clipCount: result.summary.clipCount,
screenplaySuccessCount: result.summary.screenplaySuccessCount,
screenplayFailedCount: result.summary.screenplayFailedCount,
persistedCharacters: createdCharacters.length,
persistedLocations: createdLocations.length,
persistedClips: createdClipRows.length,
skipped: true,
episodeId,
}
}
return leaseResult.result
}

View File

@@ -13,13 +13,16 @@ import {
updateTaskBillingInfo,
} from '@/lib/task/service'
import { publishTaskEvent, publishTaskStreamEvent } from '@/lib/task/publisher'
import { TASK_EVENT_TYPE, TASK_TYPE, type TaskBillingInfo, type TaskJobData } from '@/lib/task/types'
import { TASK_EVENT_TYPE, TASK_SSE_EVENT_TYPE, TASK_TYPE, type SSEEvent, type TaskBillingInfo, type TaskJobData } from '@/lib/task/types'
import { buildTaskProgressMessage, getTaskStageLabel } from '@/lib/task/progress-message'
import { normalizeAnyError } from '@/lib/errors/normalize'
import { rollbackTaskBilling, settleTaskBilling } from '@/lib/billing'
import { withTextUsageCollection } from '@/lib/billing/runtime-usage'
import { onProjectNameAvailable } from '@/lib/logging/file-writer'
import type { NormalizedError } from '@/lib/errors/types'
import { mapTaskSSEEventToRunEvents } from '@/lib/run-runtime/task-bridge'
import { publishRunEvent } from '@/lib/run-runtime/publisher'
import { RUN_EVENT_TYPE } from '@/lib/run-runtime/types'
function toObject(value: unknown): Record<string, unknown> {
if (!value || typeof value !== 'object' || Array.isArray(value)) return {}
@@ -76,6 +79,12 @@ function withFlowFields(jobData: TaskJobData, payload?: Record<string, unknown>
return base
}
// Extracts the runId from the job's flow fields.
// Returns the trimmed id, or null when it is missing, non-string, or blank.
function resolveRunId(jobData: TaskJobData): string | null {
  const { runId } = extractFlowFields(jobData)
  if (typeof runId !== 'string') return null
  const trimmed = runId.trim()
  return trimmed ? trimmed : null
}
function buildWorkerLogger(data: TaskJobData, queueName: string) {
return createScopedLogger({
module: `worker.${queueName}`,
@@ -91,10 +100,131 @@ const RUN_STREAM_REPLAY_PERSIST_TYPES = new Set<string>([
TASK_TYPE.SCRIPT_TO_STORYBOARD_RUN,
])
// Task types whose worker events are additionally mirrored onto the
// run-event channel (see publishMirroredRunEvents below).
const DIRECT_RUN_EVENT_TASK_TYPES = new Set<string>([
  TASK_TYPE.STORY_TO_SCRIPT_RUN,
  TASK_TYPE.SCRIPT_TO_STORYBOARD_RUN,
])
// Membership check: should stream-replay events be persisted for this task type?
function shouldPersistRunStreamReplay(taskType: string): boolean {
  const replayTypes = RUN_STREAM_REPLAY_PERSIST_TYPES
  return replayTypes.has(taskType)
}
// True when this task type mirrors its worker events directly onto the
// run-event channel (see DIRECT_RUN_EVENT_TASK_TYPES).
function shouldDirectPublishRunEvents(taskType: string): boolean {
  const directTypes = DIRECT_RUN_EVENT_TASK_TYPES
  return directTypes.has(taskType)
}
// Mirrors a task SSE event onto the run-event channel. No-op unless the task
// type is in DIRECT_RUN_EVENT_TASK_TYPES. The synthesized SSE message is
// translated into run events and published sequentially to preserve ordering.
async function publishMirroredRunEvents(params: {
  taskId: string
  projectId: string
  userId: string
  taskType: string
  targetType: string
  targetId: string
  episodeId?: string | null
  eventType: typeof TASK_SSE_EVENT_TYPE[keyof typeof TASK_SSE_EVENT_TYPE]
  payload?: Record<string, unknown> | null
}) {
  if (!shouldDirectPublishRunEvents(params.taskType)) return
  // Unique-enough event id: task id + base36 timestamp + random suffix.
  const eventId = `direct:${params.taskId}:${Date.now().toString(36)}:${Math.random().toString(36).slice(2, 8)}`
  const message: SSEEvent = {
    id: eventId,
    type: params.eventType,
    taskId: params.taskId,
    projectId: params.projectId,
    userId: params.userId,
    ts: new Date().toISOString(),
    taskType: params.taskType,
    targetType: params.targetType,
    targetId: params.targetId,
    episodeId: params.episodeId || null,
    payload: (params.payload || null) as SSEEvent['payload'],
  }
  for (const runEvent of mapTaskSSEEventToRunEvents(message)) {
    await publishRunEvent(runEvent)
  }
}
// Publishes a task lifecycle event on the task SSE channel, then mirrors it to
// the run-event channel for workflow task types. In the mirrored payload,
// PROGRESS is reported as PROCESSING via the `lifecycleType` field.
async function publishLifecycleEvent(params: {
  taskId: string
  projectId: string
  userId: string
  type: typeof TASK_EVENT_TYPE[keyof typeof TASK_EVENT_TYPE]
  taskType: string
  targetType: string
  targetId: string
  episodeId?: string | null
  payload?: Record<string, unknown> | null
  persist?: boolean
}) {
  const episodeId = params.episodeId || null
  await publishTaskEvent({
    taskId: params.taskId,
    projectId: params.projectId,
    userId: params.userId,
    type: params.type,
    taskType: params.taskType,
    targetType: params.targetType,
    targetId: params.targetId,
    episodeId,
    payload: params.payload,
    persist: params.persist,
  })
  const lifecycleType = params.type === TASK_EVENT_TYPE.PROGRESS
    ? TASK_EVENT_TYPE.PROCESSING
    : params.type
  await publishMirroredRunEvents({
    taskId: params.taskId,
    projectId: params.projectId,
    userId: params.userId,
    taskType: params.taskType,
    targetType: params.targetType,
    targetId: params.targetId,
    episodeId,
    eventType: TASK_SSE_EVENT_TYPE.LIFECYCLE,
    payload: {
      ...params.payload,
      lifecycleType,
    },
  })
}
// Publishes a task stream event on the task SSE channel, then mirrors the
// same payload to the run-event channel for workflow task types.
async function publishStreamEvent(params: {
  taskId: string
  projectId: string
  userId: string
  taskType: string
  targetType: string
  targetId: string
  episodeId?: string | null
  payload?: Record<string, unknown> | null
  persist?: boolean
}) {
  const episodeId = params.episodeId || null
  await publishTaskStreamEvent({
    taskId: params.taskId,
    projectId: params.projectId,
    userId: params.userId,
    taskType: params.taskType,
    targetType: params.targetType,
    targetId: params.targetId,
    episodeId,
    payload: params.payload,
    persist: params.persist,
  })
  await publishMirroredRunEvents({
    taskId: params.taskId,
    projectId: params.projectId,
    userId: params.userId,
    taskType: params.taskType,
    targetType: params.targetType,
    targetId: params.targetId,
    episodeId,
    eventType: TASK_SSE_EVENT_TYPE.STREAM,
    payload: params.payload,
  })
}
function resolveQueueAttempts(job: Job<TaskJobData>): number {
const attempts = (job.opts?.attempts ?? 1)
const value = typeof attempts === 'number' && Number.isFinite(attempts) ? Math.floor(attempts) : 1
@@ -243,7 +373,26 @@ export async function withTaskLifecycle(job: Job<TaskJobData>, handler: (job: Jo
requestId: data.trace?.requestId || null,
},
})
await publishTaskEvent({
if (shouldDirectPublishRunEvents(data.type)) {
const runId = resolveRunId(data)
if (runId) {
await publishRunEvent({
runId,
projectId: data.projectId,
userId: data.userId,
eventType: RUN_EVENT_TYPE.RUN_START,
payload: {
...processingPayload,
message: buildTaskProgressMessage({
eventType: TASK_EVENT_TYPE.PROCESSING,
taskType: data.type,
payload: processingPayload,
}),
},
})
}
}
await publishLifecycleEvent({
taskId,
projectId: data.projectId,
userId: data.userId,
@@ -297,7 +446,7 @@ export async function withTaskLifecycle(job: Job<TaskJobData>, handler: (job: Jo
requestId: data.trace?.requestId || null,
},
})
await publishTaskEvent({
await publishLifecycleEvent({
taskId,
projectId: data.projectId,
userId: data.userId,
@@ -411,7 +560,7 @@ export async function withTaskLifecycle(job: Job<TaskJobData>, handler: (job: Jo
})
try {
await publishTaskEvent({
await publishLifecycleEvent({
taskId,
projectId: data.projectId,
userId: data.userId,
@@ -468,7 +617,7 @@ export async function withTaskLifecycle(job: Job<TaskJobData>, handler: (job: Jo
if (process.env.NODE_ENV !== 'production' && error instanceof Error && typeof error.stack === 'string') {
failedPayload.errorStack = error.stack.slice(0, 8000)
}
await publishTaskEvent({
await publishLifecycleEvent({
taskId,
projectId: data.projectId,
userId: data.userId,
@@ -529,7 +678,7 @@ export async function reportTaskProgress(job: Job<TaskJobData>, progress: number
if (!updated) {
return
}
await publishTaskEvent({
await publishLifecycleEvent({
taskId: job.data.taskId,
projectId: job.data.projectId,
userId: job.data.userId,
@@ -562,7 +711,7 @@ export async function reportTaskStreamChunk(
message: payload?.message || (chunk.kind === 'reasoning' ? 'progress.runtime.llm.reasoning' : 'progress.runtime.llm.output'),
})
await publishTaskStreamEvent({
await publishStreamEvent({
taskId: job.data.taskId,
projectId: job.data.projectId,
userId: job.data.userId,

View File

@@ -0,0 +1,95 @@
// Workflow type identifiers; they select which invalidation strategy
// resolveRetryInvalidationStepKeys applies.
const STORY_TO_SCRIPT_WORKFLOW = 'story_to_script_run'
const SCRIPT_TO_STORYBOARD_WORKFLOW = 'script_to_storyboard_run'
// Drops blank/whitespace-only keys and de-duplicates,
// preserving first-seen order.
function uniqueStepKeys(stepKeys: Iterable<string>): string[] {
  const seen = new Set<string>()
  for (const key of stepKeys) {
    if (key.trim().length === 0) continue
    seen.add(key)
  }
  return [...seen]
}
// Computes which step keys are invalidated when `stepKey` is retried in a
// story_to_script run. Character/location analysis invalidates the clip split
// (when present) and every screenplay_* step; the clip split invalidates every
// screenplay_* step; any other key invalidates only itself.
function resolveStoryToScriptInvalidation(params: {
  stepKey: string
  existingStepKeys: ReadonlySet<string>
}): string[] {
  const { stepKey, existingStepKeys } = params
  const affected = new Set<string>([stepKey])
  const isAnalysisStep = stepKey === 'analyze_characters' || stepKey === 'analyze_locations'
  if (isAnalysisStep && existingStepKeys.has('split_clips')) {
    affected.add('split_clips')
  }
  if (isAnalysisStep || stepKey === 'split_clips') {
    for (const existing of existingStepKeys) {
      if (existing.startsWith('screenplay_')) {
        affected.add(existing)
      }
    }
  }
  return uniqueStepKeys(affected)
}
type StoryboardPhase = 'phase1' | 'phase2_cinematography' | 'phase2_acting' | 'phase3_detail'

// Parses a storyboard step key of the form `clip_<clipId>_<phase>`.
// Returns null when the key does not match or the clip id trims to empty.
function parseStoryboardStepKey(stepKey: string): { clipId: string; phase: StoryboardPhase } | null {
  const pattern = /^clip_(.+)_(phase1|phase2_cinematography|phase2_acting|phase3_detail)$/
  const match = pattern.exec(stepKey.trim())
  if (match === null) return null
  const clipId = (match[1] || '').trim()
  if (!clipId) return null
  return { clipId, phase: match[2] as StoryboardPhase }
}
// Downstream invalidation for script_to_storyboard retries. Retrying a clip
// phase invalidates that clip's later phases plus voice_analyze, restricted to
// steps that actually exist in the run. Retrying voice_analyze itself, or an
// unrecognized step key, invalidates only that key (unfiltered).
function resolveScriptToStoryboardInvalidation(params: {
  stepKey: string
  existingStepKeys: ReadonlySet<string>
}): string[] {
  const { stepKey, existingStepKeys } = params
  if (stepKey === 'voice_analyze') {
    return uniqueStepKeys([stepKey])
  }
  const parsed = parseStoryboardStepKey(stepKey)
  if (parsed === null) {
    return uniqueStepKeys([stepKey])
  }
  const prefix = `clip_${parsed.clipId}_`
  const downstream: string[] = []
  switch (parsed.phase) {
    case 'phase1':
      downstream.push(`${prefix}phase2_cinematography`, `${prefix}phase2_acting`, `${prefix}phase3_detail`)
      break
    case 'phase2_cinematography':
    case 'phase2_acting':
      downstream.push(`${prefix}phase3_detail`)
      break
    default:
      break
  }
  const affected = new Set<string>([stepKey, ...downstream, 'voice_analyze'])
  return uniqueStepKeys(Array.from(affected).filter((key) => existingStepKeys.has(key)))
}
/**
 * Resolves the full set of step keys whose results must be invalidated when
 * `stepKey` is retried, dispatching on the workflow type. Unknown workflow
 * types invalidate only the retried step, and only when it exists in the run.
 */
export function resolveRetryInvalidationStepKeys(params: {
  workflowType: string
  stepKey: string
  existingStepKeys: string[]
}): string[] {
  const { workflowType, stepKey } = params
  const existingStepKeys: ReadonlySet<string> = new Set(params.existingStepKeys)
  switch (workflowType) {
    case STORY_TO_SCRIPT_WORKFLOW:
      return resolveStoryToScriptInvalidation({ stepKey, existingStepKeys })
    case SCRIPT_TO_STORYBOARD_WORKFLOW:
      return resolveScriptToStoryboardInvalidation({ stepKey, existingStepKeys })
    default:
      return uniqueStepKeys(existingStepKeys.has(stepKey) ? [stepKey] : [])
  }
}

View File

@@ -1,110 +0,0 @@
import { runPipelineGraph, type PipelineGraphState } from '@/lib/run-runtime/pipeline-graph'
import {
runScriptToStoryboardOrchestrator,
type ScriptToStoryboardOrchestratorResult,
type ScriptToStoryboardPromptTemplates,
type ScriptToStoryboardStepMeta,
type ScriptToStoryboardStepOutput,
JsonParseError,
} from '@/lib/novel-promotion/script-to-storyboard/orchestrator'
import type { CharacterAsset, LocationAsset } from '@/lib/storyboard-phases'
// Minimal clip row shape the storyboard orchestrator consumes.
type ClipInput = {
  id: string
  content: string | null
  characters: string | null
  location: string | null
  screenplay: string | null
}

// Character/location asset libraries made available to storyboard prompts.
type NovelPromotionData = {
  characters: CharacterAsset[]
  locations: LocationAsset[]
}

// Pipeline-graph state plus the orchestrator's final result
// (null until the orchestrator node has completed).
export type ScriptToStoryboardGraphState = PipelineGraphState & {
  orchestratorResult: ScriptToStoryboardOrchestratorResult | null
}

// Input for runScriptToStoryboardGraph.
export type ScriptToStoryboardGraphInput = {
  runId: string
  projectId: string
  userId: string
  // Passed through to runScriptToStoryboardOrchestrator as its step concurrency.
  concurrency: number
  clips: ClipInput[]
  novelPromotionData: NovelPromotionData
  promptTemplates: ScriptToStoryboardPromptTemplates
  // Executes a single orchestrator step (one LLM call) and returns its output.
  runStep: (
    meta: ScriptToStoryboardStepMeta,
    prompt: string,
    action: string,
    maxOutputTokens: number,
  ) => Promise<ScriptToStoryboardStepOutput>
  // Invoked with the JsonParseError before the graph rethrows it.
  onParseError?: (error: JsonParseError) => Promise<void> | void
}
// Runs the script_to_storyboard pipeline as a two-node graph:
//   1) orchestrator — drives the storyboard orchestrator across all clips;
//   2) validate     — asserts the orchestrator actually produced a result.
// A JsonParseError is handed to input.onParseError before being rethrown.
export async function runScriptToStoryboardGraph(
  input: ScriptToStoryboardGraphInput,
): Promise<ScriptToStoryboardGraphState> {
  const { runId, projectId, userId } = input
  const initialState: ScriptToStoryboardGraphState = {
    refs: {},
    meta: {},
    orchestratorResult: null,
  }
  try {
    return await runPipelineGraph({
      runId,
      projectId,
      userId,
      state: initialState,
      nodes: [
        {
          key: 'script_to_storyboard_orchestrator',
          title: 'script_to_storyboard_orchestrator',
          maxAttempts: 2,
          // The orchestrator drives many model calls; allow up to 20 minutes.
          timeoutMs: 1000 * 60 * 20,
          run: async (ctx) => {
            const result = await runScriptToStoryboardOrchestrator({
              concurrency: input.concurrency,
              clips: input.clips,
              novelPromotionData: input.novelPromotionData,
              promptTemplates: input.promptTemplates,
              runStep: input.runStep,
            })
            ctx.state.orchestratorResult = result
            return {
              output: {
                clipCount: result.summary.clipCount,
                totalPanelCount: result.summary.totalPanelCount,
              },
            }
          },
        },
        {
          key: 'script_to_storyboard_validate',
          title: 'script_to_storyboard_validate',
          maxAttempts: 1,
          timeoutMs: 1000 * 30,
          run: async (ctx) => {
            const result = ctx.state.orchestratorResult
            if (!result) {
              throw new Error('script_to_storyboard orchestrator produced no result')
            }
            return {
              output: {
                validated: true,
                totalPanelCount: result.summary.totalPanelCount,
              },
            }
          },
        },
      ],
    })
  } catch (error) {
    // Let the caller record JSON parse failures before the graph surfaces them.
    if (error instanceof JsonParseError) {
      await input.onParseError?.(error)
    }
    throw error
  }
}

View File

@@ -1,103 +0,0 @@
import { runPipelineGraph, type PipelineGraphState } from '@/lib/run-runtime/pipeline-graph'
import {
runStoryToScriptOrchestrator,
type StoryToScriptOrchestratorResult,
type StoryToScriptPromptTemplates,
type StoryToScriptStepMeta,
type StoryToScriptStepOutput,
} from '@/lib/novel-promotion/story-to-script/orchestrator'
/**
 * Graph state for the story-to-script pipeline. Extends the generic pipeline
 * graph state with the orchestrator's result, which stays null until the
 * orchestrator node has completed successfully.
 */
export type StoryToScriptGraphState = PipelineGraphState & {
  orchestratorResult: StoryToScriptOrchestratorResult | null
}
/** Input for {@link runStoryToScriptGraph}. */
export type StoryToScriptGraphInput = {
  /** Identifier of the pipeline run this graph executes under. */
  runId: string
  /** Project the script is generated for. */
  projectId: string
  /** User that owns the run. */
  userId: string
  /** Concurrency level forwarded to the orchestrator. */
  concurrency: number
  /** Story content forwarded to the orchestrator. */
  content: string
  /** Base character names forwarded to the orchestrator. */
  baseCharacters: string[]
  /** Base location names forwarded to the orchestrator. */
  baseLocations: string[]
  /** Character introductions; `introduction` may be absent or null. */
  baseCharacterIntroductions: Array<{ name: string; introduction?: string | null }>
  /** Prompt templates used by the orchestrator's steps. */
  promptTemplates: StoryToScriptPromptTemplates
  /**
   * Callback executing a single orchestrator step: receives the step
   * metadata, the prompt text, an action label, and a max-output-token budget.
   */
  runStep: (
    meta: StoryToScriptStepMeta,
    prompt: string,
    action: string,
    maxOutputTokens: number,
  ) => Promise<StoryToScriptStepOutput>
}
/**
 * Executes the story-to-script pipeline graph.
 *
 * Node 1 runs the orchestrator and records its result on graph state.
 * Node 2 validates that a result exists and that no screenplay step failed;
 * a partial failure aborts the graph with a STORY_TO_SCRIPT_PARTIAL_FAILED
 * error carrying a short preview (at most three) of the failing clips.
 */
export async function runStoryToScriptGraph(
  input: StoryToScriptGraphInput,
): Promise<StoryToScriptGraphState> {
  const graphState: StoryToScriptGraphState = {
    refs: {},
    meta: {},
    orchestratorResult: null,
  }
  return await runPipelineGraph({
    runId: input.runId,
    projectId: input.projectId,
    userId: input.userId,
    state: graphState,
    nodes: [
      {
        key: 'story_to_script_orchestrator',
        title: 'story_to_script_orchestrator',
        maxAttempts: 2,
        timeoutMs: 15 * 60 * 1000, // 15 minutes
        run: async (context) => {
          // Delegate the work to the orchestrator and keep its result on
          // graph state so the downstream validation node can inspect it.
          const result = await runStoryToScriptOrchestrator({
            content: input.content,
            concurrency: input.concurrency,
            baseCharacters: input.baseCharacters,
            baseLocations: input.baseLocations,
            baseCharacterIntroductions: input.baseCharacterIntroductions,
            promptTemplates: input.promptTemplates,
            runStep: input.runStep,
          })
          context.state.orchestratorResult = result
          return {
            output: {
              clipCount: result.summary.clipCount,
              screenplaySuccessCount: result.summary.screenplaySuccessCount,
              screenplayFailedCount: result.summary.screenplayFailedCount,
            },
          }
        },
      },
      {
        key: 'story_to_script_validate',
        title: 'story_to_script_validate',
        maxAttempts: 1,
        timeoutMs: 30 * 1000, // 30 seconds
        run: async (context) => {
          const result = context.state.orchestratorResult
          if (!result) {
            throw new Error('story_to_script orchestrator produced no result')
          }
          const { screenplayFailedCount, clipCount } = result.summary
          if (screenplayFailedCount > 0) {
            // Keep the error readable: preview at most three failed clips.
            const preview = result.screenplayResults
              .filter((item) => !item.success)
              .slice(0, 3)
              .map((item) => `${item.clipId}:${item.error || 'unknown error'}`)
              .join(' | ')
            throw new Error(
              `STORY_TO_SCRIPT_PARTIAL_FAILED: ${screenplayFailedCount}/${clipCount} screenplay steps failed. ${preview}`,
            )
          }
          return {
            output: {
              validated: true,
              clipCount: result.summary.clipCount,
            },
          }
        },
      },
    ],
  })
}