diff --git a/apps/sim/lib/workflows/persistence/utils.ts b/apps/sim/lib/workflows/persistence/utils.ts index e33c531cf6..90294c42bc 100644 --- a/apps/sim/lib/workflows/persistence/utils.ts +++ b/apps/sim/lib/workflows/persistence/utils.ts @@ -545,6 +545,9 @@ export async function saveWorkflowToNormalizedTables( tx.delete(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId)), ]) + const CHUNK_SIZE = 50 + + // Insert blocks if (Object.keys(state.blocks).length > 0) { const blockInserts = Object.values(state.blocks).map((block) => ({ id: block.id, @@ -566,9 +569,15 @@ export async function saveWorkflowToNormalizedTables( locked: block.locked ?? false, })) - await tx.insert(workflowBlocks).values(blockInserts) + // SQLite caps bound parameters at 999 per statement (the default SQLITE_MAX_VARIABLE_NUMBER before 3.32.0). + // workflowBlocks has 17 fields -> max safe chunk = floor(999/17) = 58. + // Using 50 for a conservative margin.
+ for (let i = 0; i < blockInserts.length; i += CHUNK_SIZE) { + await tx.insert(workflowBlocks).values(blockInserts.slice(i, i + CHUNK_SIZE)) + } } + // Insert edges if (state.edges.length > 0) { const edgeInserts = state.edges.map((edge) => ({ id: edge.id, @@ -579,7 +588,9 @@ export async function saveWorkflowToNormalizedTables( targetHandle: edge.targetHandle || null, })) - await tx.insert(workflowEdges).values(edgeInserts) + for (let i = 0; i < edgeInserts.length; i += CHUNK_SIZE) { + await tx.insert(workflowEdges).values(edgeInserts.slice(i, i + CHUNK_SIZE)) + } } const subflowInserts: SubflowInsert[] = [] @@ -603,7 +614,9 @@ export async function saveWorkflowToNormalizedTables( }) if (subflowInserts.length > 0) { - await tx.insert(workflowSubflows).values(subflowInserts) + for (let i = 0; i < subflowInserts.length; i += CHUNK_SIZE) { + await tx.insert(workflowSubflows).values(subflowInserts.slice(i, i + CHUNK_SIZE)) + } } } @@ -884,7 +897,7 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener newBlocks[newId] = newBlock }) - // Regenerate edges with updated source/target references + // Regenerate edges with updated source/target references ;(state.edges || []).forEach((edge: Edge) => { const newId = edgeIdMapping.get(edge.id)!