Introduce Plugins (#13836)
Signed-off-by: yihong0618 <zouzou0208@gmail.com> Signed-off-by: -LAN- <laipz8200@outlook.com> Signed-off-by: xhe <xw897002528@gmail.com> Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: takatost <takatost@gmail.com> Co-authored-by: kurokobo <kuro664@gmail.com> Co-authored-by: Novice Lee <novicelee@NoviPro.local> Co-authored-by: zxhlyh <jasonapring2015@outlook.com> Co-authored-by: AkaraChen <akarachen@outlook.com> Co-authored-by: Yi <yxiaoisme@gmail.com> Co-authored-by: Joel <iamjoel007@gmail.com> Co-authored-by: JzoNg <jzongcode@gmail.com> Co-authored-by: twwu <twwu@dify.ai> Co-authored-by: Hiroshi Fujita <fujita-h@users.noreply.github.com> Co-authored-by: AkaraChen <85140972+AkaraChen@users.noreply.github.com> Co-authored-by: NFish <douxc512@gmail.com> Co-authored-by: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Co-authored-by: 非法操作 <hjlarry@163.com> Co-authored-by: Novice <857526207@qq.com> Co-authored-by: Hiroki Nagai <82458324+nagaihiroki-git@users.noreply.github.com> Co-authored-by: Gen Sato <52241300+halogen22@users.noreply.github.com> Co-authored-by: eux <euxuuu@gmail.com> Co-authored-by: huangzhuo1949 <167434202+huangzhuo1949@users.noreply.github.com> Co-authored-by: huangzhuo <huangzhuo1@xiaomi.com> Co-authored-by: lotsik <lotsik@mail.ru> Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> Co-authored-by: nite-knite <nkCoding@gmail.com> Co-authored-by: Jyong <76649700+JohnJyong@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: gakkiyomi <gakkiyomi@aliyun.com> Co-authored-by: CN-P5 <heibai2006@gmail.com> Co-authored-by: CN-P5 <heibai2006@qq.com> Co-authored-by: Chuehnone <1897025+chuehnone@users.noreply.github.com> Co-authored-by: yihong <zouzou0208@gmail.com> Co-authored-by: Kevin9703 <51311316+Kevin9703@users.noreply.github.com> Co-authored-by: -LAN- <laipz8200@outlook.com> Co-authored-by: Boris Feld <lothiraldan@gmail.com> Co-authored-by: mbo <himabo@gmail.com> Co-authored-by: mabo <mabo@aeyes.ai> Co-authored-by: Warren Chen <warren.chen830@gmail.com> Co-authored-by: JzoNgKVO <27049666+JzoNgKVO@users.noreply.github.com> Co-authored-by: jiandanfeng <chenjh3@wangsu.com> Co-authored-by: zhu-an <70234959+xhdd123321@users.noreply.github.com> Co-authored-by: zhaoqingyu.1075 <zhaoqingyu.1075@bytedance.com> Co-authored-by: 海狸大師 <86974027+yenslife@users.noreply.github.com> Co-authored-by: Xu Song <xusong.vip@gmail.com> Co-authored-by: rayshaw001 <396301947@163.com> Co-authored-by: Ding Jiatong <dingjiatong@gmail.com> Co-authored-by: Bowen Liang <liangbowen@gf.com.cn> Co-authored-by: JasonVV <jasonwangiii@outlook.com> Co-authored-by: le0zh <newlight@qq.com> Co-authored-by: zhuxinliang <zhuxinliang@didiglobal.com> Co-authored-by: k-zaku <zaku99@outlook.jp> Co-authored-by: luckylhb90 <luckylhb90@gmail.com> Co-authored-by: hobo.l <hobo.l@binance.com> Co-authored-by: jiangbo721 <365065261@qq.com> Co-authored-by: 刘江波 <jiangbo721@163.com> Co-authored-by: Shun Miyazawa <34241526+miya@users.noreply.github.com> Co-authored-by: EricPan <30651140+Egfly@users.noreply.github.com> Co-authored-by: crazywoola <427733928@qq.com> Co-authored-by: sino <sino2322@gmail.com> Co-authored-by: Jhvcc <37662342+Jhvcc@users.noreply.github.com> Co-authored-by: lowell <lowell.hu@zkteco.in> Co-authored-by: Boris Polonsky <BorisPolonsky@users.noreply.github.com> Co-authored-by: Ademílson Tonato <ademilsonft@outlook.com> Co-authored-by: Ademílson Tonato <ademilson.tonato@refurbed.com> 
Co-authored-by: IWAI, Masaharu <iwaim.sub@gmail.com> Co-authored-by: Yueh-Po Peng (Yabi) <94939112+y10ab1@users.noreply.github.com> Co-authored-by: Jason <ggbbddjm@gmail.com> Co-authored-by: Xin Zhang <sjhpzx@gmail.com> Co-authored-by: yjc980121 <3898524+yjc980121@users.noreply.github.com> Co-authored-by: heyszt <36215648+hieheihei@users.noreply.github.com> Co-authored-by: Abdullah AlOsaimi <osaimiacc@gmail.com> Co-authored-by: Abdullah AlOsaimi <189027247+osaimi@users.noreply.github.com> Co-authored-by: Yingchun Lai <laiyingchun@apache.org> Co-authored-by: Hash Brown <hi@xzd.me> Co-authored-by: zuodongxu <192560071+zuodongxu@users.noreply.github.com> Co-authored-by: Masashi Tomooka <tmokmss@users.noreply.github.com> Co-authored-by: aplio <ryo.091219@gmail.com> Co-authored-by: Obada Khalili <54270856+obadakhalili@users.noreply.github.com> Co-authored-by: Nam Vu <zuzoovn@gmail.com> Co-authored-by: Kei YAMAZAKI <1715090+kei-yamazaki@users.noreply.github.com> Co-authored-by: TechnoHouse <13776377+deephbz@users.noreply.github.com> Co-authored-by: Riddhimaan-Senapati <114703025+Riddhimaan-Senapati@users.noreply.github.com> Co-authored-by: MaFee921 <31881301+2284730142@users.noreply.github.com> Co-authored-by: te-chan <t-nakanome@sakura-is.co.jp> Co-authored-by: HQidea <HQidea@users.noreply.github.com> Co-authored-by: Joshbly <36315710+Joshbly@users.noreply.github.com> Co-authored-by: xhe <xw897002528@gmail.com> Co-authored-by: weiwenyan-dev <154779315+weiwenyan-dev@users.noreply.github.com> Co-authored-by: ex_wenyan.wei <ex_wenyan.wei@tcl.com> Co-authored-by: engchina <12236799+engchina@users.noreply.github.com> Co-authored-by: engchina <atjapan2015@gmail.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: 呆萌闷油瓶 <253605712@qq.com> Co-authored-by: Kemal <kemalmeler@outlook.com> Co-authored-by: Lazy_Frog <4590648+lazyFrogLOL@users.noreply.github.com> Co-authored-by: Yi Xiao <54782454+YIXIAO0@users.noreply.github.com> Co-authored-by: Steven sun <98230804+Tuyohai@users.noreply.github.com> Co-authored-by: steven <sunzwj@digitalchina.com> Co-authored-by: Kalo Chin <91766386+fdb02983rhy@users.noreply.github.com> Co-authored-by: Katy Tao <34019945+KatyTao@users.noreply.github.com> Co-authored-by: depy <42985524+h4ckdepy@users.noreply.github.com> Co-authored-by: 胡春东 <gycm520@gmail.com> Co-authored-by: Junjie.M <118170653@qq.com> Co-authored-by: MuYu <mr.muzea@gmail.com> Co-authored-by: Naoki Takashima <39912547+takatea@users.noreply.github.com> Co-authored-by: Summer-Gu <37869445+gubinjie@users.noreply.github.com> Co-authored-by: Fei He <droxer.he@gmail.com> Co-authored-by: ybalbert001 <120714773+ybalbert001@users.noreply.github.com> Co-authored-by: Yuanbo Li <ybalbert@amazon.com> Co-authored-by: douxc <7553076+douxc@users.noreply.github.com> Co-authored-by: liuzhenghua <1090179900@qq.com> Co-authored-by: Wu Jiayang <62842862+Wu-Jiayang@users.noreply.github.com> Co-authored-by: Your Name <you@example.com> Co-authored-by: kimjion <45935338+kimjion@users.noreply.github.com> Co-authored-by: AugNSo <song.tiankai@icloud.com> Co-authored-by: llinvokerl <38915183+llinvokerl@users.noreply.github.com> Co-authored-by: liusurong.lsr <liusurong.lsr@alibaba-inc.com> Co-authored-by: Vasu Negi <vasu-negi@users.noreply.github.com> Co-authored-by: Hundredwz <1808096180@qq.com> Co-authored-by: Xiyuan Chen <52963600+GareArc@users.noreply.github.com>
web/app/components/workflow/run/utils/format-log/agent/data.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
import { BlockEnum } from '@/app/components/workflow/types'

export const agentNodeData = (() => {
  const node = {
    node_type: BlockEnum.Agent,
    execution_metadata: {
      agent_log: [
        { id: '1', label: 'Root 1' },
        { id: '2', parent_id: '1', label: 'Child 1.2' },
        { id: '3', parent_id: '1', label: 'Child 1.3' },
        { id: '4', parent_id: '2', label: 'Child 2.4' },
        { id: '5', parent_id: '2', label: 'Child 2.5' },
        { id: '6', parent_id: '3', label: 'Child 3.6' },
        { id: '7', parent_id: '4', label: 'Child 4.7' },
        { id: '8', parent_id: '4', label: 'Child 4.8' },
        { id: '9', parent_id: '5', label: 'Child 5.9' },
        { id: '10', parent_id: '5', label: 'Child 5.10' },
        { id: '11', parent_id: '7', label: 'Child 7.11' },
        { id: '12', parent_id: '7', label: 'Child 7.12' },
        { id: '13', parent_id: '9', label: 'Child 9.13' },
        { id: '14', parent_id: '9', label: 'Child 9.14' },
        { id: '15', parent_id: '9', label: 'Child 9.15' },
      ],
    },
  }

  return {
    in: [node],
    expect: [{
      ...node,
      agentLog: [
        {
          id: '1',
          label: 'Root 1',
          children: [
            {
              id: '2',
              parent_id: '1',
              label: 'Child 1.2',
              children: [
                {
                  id: '4',
                  parent_id: '2',
                  label: 'Child 2.4',
                  children: [
                    {
                      id: '7',
                      parent_id: '4',
                      label: 'Child 4.7',
                      children: [
                        { id: '11', parent_id: '7', label: 'Child 7.11' },
                        { id: '12', parent_id: '7', label: 'Child 7.12' },
                      ],
                    },
                    { id: '8', parent_id: '4', label: 'Child 4.8' },
                  ],
                },
                {
                  id: '5',
                  parent_id: '2',
                  label: 'Child 2.5',
                  children: [
                    {
                      id: '9',
                      parent_id: '5',
                      label: 'Child 5.9',
                      children: [
                        { id: '13', parent_id: '9', label: 'Child 9.13' },
                        { id: '14', parent_id: '9', label: 'Child 9.14' },
                        { id: '15', parent_id: '9', label: 'Child 9.15' },
                      ],
                    },
                    { id: '10', parent_id: '5', label: 'Child 5.10' },
                  ],
                },
              ],
            },
            {
              id: '3',
              parent_id: '1',
              label: 'Child 1.3',
              children: [
                { id: '6', parent_id: '3', label: 'Child 3.6' },
              ],
            },
          ],
        },
      ],
    }],
  }
})()

export const oneStepCircle = (() => {
  const node = {
    node_type: BlockEnum.Agent,
    execution_metadata: {
      agent_log: [
        { id: '1', label: 'Node 1' },
        { id: '1', parent_id: '1', label: 'Node 1' },
        { id: '1', parent_id: '1', label: 'Node 1' },
        { id: '1', parent_id: '1', label: 'Node 1' },
        { id: '1', parent_id: '1', label: 'Node 1' },
        { id: '1', parent_id: '1', label: 'Node 1' },
      ],
    },
  }

  return {
    in: [node],
    expect: [{
      ...node,
      agentLog: [
        {
          id: '1',
          label: 'Node 1',
          hasCircle: true,
          children: [],
        },
      ],
    }],
  }
})()

export const multiStepsCircle = (() => {
  const node = {
    node_type: BlockEnum.Agent,
    execution_metadata: {
      agent_log: [
        // 1 -> [2 -> 4 -> 1, 3]
        { id: '1', label: 'Node 1' },
        { id: '2', parent_id: '1', label: 'Node 2' },
        { id: '3', parent_id: '1', label: 'Node 3' },
        { id: '4', parent_id: '2', label: 'Node 4' },

        // Loop
        { id: '1', parent_id: '4', label: 'Node 1' },
        { id: '2', parent_id: '1', label: 'Node 2' },
        { id: '4', parent_id: '2', label: 'Node 4' },
        { id: '1', parent_id: '4', label: 'Node 1' },
        { id: '2', parent_id: '1', label: 'Node 2' },
        { id: '4', parent_id: '2', label: 'Node 4' },
      ],
    },
  }
  // 1 -> [2(4(1(2(4...)))), 3]
  return {
    in: [node],
    expect: [{
      ...node,
      agentLog: [
        {
          id: '1',
          label: 'Node 1',
          children: [
            {
              id: '2',
              parent_id: '1',
              label: 'Node 2',
              children: [
                {
                  id: '4',
                  parent_id: '2',
                  label: 'Node 4',
                  children: [],
                  hasCircle: true,
                },
              ],
            },
            {
              id: '3',
              parent_id: '1',
              label: 'Node 3',
            },
          ],
        },
      ],
    }],
  }
})()

export const CircleNestCircle = (() => {
})()
@@ -0,0 +1,15 @@
import format from '.'
import { agentNodeData, multiStepsCircle, oneStepCircle } from './data'

describe('agent', () => {
  test('list should transform to tree', () => {
    // console.log(format(agentNodeData.in as any))
    expect(format(agentNodeData.in as any)).toEqual(agentNodeData.expect)
  })

  test('list should remove circle log item', () => {
    // format(oneStepCircle.in as any)
    expect(format(oneStepCircle.in as any)).toEqual(oneStepCircle.expect)
    expect(format(multiStepsCircle.in as any)).toEqual(multiStepsCircle.expect)
  })
})
@@ -0,0 +1,99 @@
import { BlockEnum } from '@/app/components/workflow/types'
import type { AgentLogItem, AgentLogItemWithChildren, NodeTracing } from '@/types/workflow'
import { cloneDeep } from 'lodash-es'

const supportedAgentLogNodes = [BlockEnum.Agent, BlockEnum.Tool]

const remove = (node: AgentLogItemWithChildren, removeId: string) => {
  let { children } = node
  if (!children || children.length === 0)
    return

  const hasCircle = !!children.find(c => c.id === removeId)
  if (hasCircle) {
    node.hasCircle = true
    node.children = node.children.filter(c => c.id !== removeId)
    children = node.children
  }

  children.forEach((child) => {
    remove(child, removeId)
  })
}

const removeRepeatedSiblings = (list: AgentLogItemWithChildren[]) => {
  if (!list || list.length === 0)
    return []

  const result: AgentLogItemWithChildren[] = []
  const addedItemIds: string[] = []
  list.forEach((item) => {
    if (!addedItemIds.includes(item.id)) {
      result.push(item)
      addedItemIds.push(item.id)
    }
  })
  return result
}

const removeCircleLogItem = (log: AgentLogItemWithChildren) => {
  const newLog = cloneDeep(log)
  newLog.children = removeRepeatedSiblings(newLog.children)
  let { id, children } = newLog
  if (!children || children.length === 0)
    return log

  // check one step circle
  const hasOneStepCircle = !!children.find(c => c.id === id)
  if (hasOneStepCircle) {
    newLog.hasCircle = true
    newLog.children = newLog.children.filter(c => c.id !== id)
    children = newLog.children
  }

  children.forEach((child, index) => {
    remove(child, id) // check multi steps circle
    children[index] = removeCircleLogItem(child)
  })
  return newLog
}

const listToTree = (logs: AgentLogItem[]) => {
  if (!logs || logs.length === 0)
    return []

  const tree: AgentLogItemWithChildren[] = []
  logs.forEach((log) => {
    const hasParent = !!log.parent_id
    if (hasParent) {
      const parent = logs.find(item => item.id === log.parent_id) as AgentLogItemWithChildren
      if (parent) {
        if (!parent.children)
          parent.children = []
        parent.children.push(log as AgentLogItemWithChildren)
      }
    }
    else {
      tree.push(log as AgentLogItemWithChildren)
    }
  })
  return tree
}

const format = (list: NodeTracing[]): NodeTracing[] => {
  const result: NodeTracing[] = list.map((item) => {
    let treeList: AgentLogItemWithChildren[] = []
    let removedCircleTree: AgentLogItemWithChildren[] = []
    if (supportedAgentLogNodes.includes(item.node_type) && item.execution_metadata?.agent_log && item.execution_metadata?.agent_log.length > 0)
      treeList = listToTree(item.execution_metadata.agent_log)
    // console.log(JSON.stringify(treeList))
    removedCircleTree = treeList.length > 0 ? treeList.map(t => removeCircleLogItem(t)) : []
    item.agentLog = removedCircleTree

    return item
  })

  return result
}

export default format
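For orientation, here is a minimal usage sketch of the agent formatter above. The input node and ids are invented for illustration; only the behaviour (building agentLog as a nested tree from the flat agent_log) comes from the code above.

import { BlockEnum } from '@/app/components/workflow/types'
import format from '.' // the agent formatter defined above

// One tracing item whose flat agent_log holds a parent/child pair.
const tracing = [{
  node_type: BlockEnum.Agent,
  execution_metadata: {
    agent_log: [
      { id: 'root', label: 'plan' },
      { id: 'step', parent_id: 'root', label: 'call tool' },
    ],
  },
}]

// After formatting, the item carries the nested tree on agentLog:
// [{ ..., agentLog: [{ id: 'root', label: 'plan', children: [{ id: 'step', ... }] }] }]
const formatted = format(tracing as any)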
@@ -0,0 +1,128 @@
import parseDSL from './graph-to-log-struct'

describe('parseDSL', () => {
  it('should parse plain nodes correctly', () => {
    const dsl = 'plainNode1 -> plainNode2'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: {}, status: 'succeeded' },
      { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: {}, status: 'succeeded' },
    ])
  })

  it('should parse retry nodes correctly', () => {
    const dsl = '(retry, retryNode, 3)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'succeeded' },
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'retry' },
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'retry' },
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'retry' },
    ])
  })

  it('should parse iteration nodes correctly', () => {
    const dsl = '(iteration, iterationNode, plainNode1 -> plainNode2)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'iterationNode', node_id: 'iterationNode', title: 'iterationNode', node_type: 'iteration', execution_metadata: {}, status: 'succeeded' },
      { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0 }, status: 'succeeded' },
      { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0 }, status: 'succeeded' },
    ])
  })

  it('should parse parallel nodes correctly', () => {
    const dsl = '(parallel, parallelNode, nodeA, nodeB -> nodeC)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'parallelNode', node_id: 'parallelNode', title: 'parallelNode', execution_metadata: { parallel_id: 'parallelNode' }, status: 'succeeded' },
      { id: 'nodeA', node_id: 'nodeA', title: 'nodeA', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'nodeA' }, status: 'succeeded' },
      { id: 'nodeB', node_id: 'nodeB', title: 'nodeB', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'nodeB' }, status: 'succeeded' },
      { id: 'nodeC', node_id: 'nodeC', title: 'nodeC', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'nodeB' }, status: 'succeeded' },
    ])
  })

  // TODO
  it('should handle nested parallel nodes', () => {
    const dsl = '(parallel, outerParallel, (parallel, innerParallel, plainNode1 -> plainNode2) -> plainNode3)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      {
        id: 'outerParallel',
        node_id: 'outerParallel',
        title: 'outerParallel',
        execution_metadata: { parallel_id: 'outerParallel' },
        status: 'succeeded',
      },
      {
        id: 'innerParallel',
        node_id: 'innerParallel',
        title: 'innerParallel',
        execution_metadata: { parallel_id: 'outerParallel', parallel_start_node_id: 'innerParallel' },
        status: 'succeeded',
      },
      {
        id: 'plainNode1',
        node_id: 'plainNode1',
        title: 'plainNode1',
        execution_metadata: {
          parallel_id: 'innerParallel',
          parallel_start_node_id: 'plainNode1',
          parent_parallel_id: 'outerParallel',
          parent_parallel_start_node_id: 'innerParallel',
        },
        status: 'succeeded',
      },
      {
        id: 'plainNode2',
        node_id: 'plainNode2',
        title: 'plainNode2',
        execution_metadata: {
          parallel_id: 'innerParallel',
          parallel_start_node_id: 'plainNode1',
          parent_parallel_id: 'outerParallel',
          parent_parallel_start_node_id: 'innerParallel',
        },
        status: 'succeeded',
      },
      {
        id: 'plainNode3',
        node_id: 'plainNode3',
        title: 'plainNode3',
        execution_metadata: {
          parallel_id: 'outerParallel',
          parallel_start_node_id: 'plainNode3',
        },
        status: 'succeeded',
      },
    ])
  })

  // iteration nodes do not support nested iterations
  // it('should handle nested iterations', () => {
  //   const dsl = '(iteration, outerIteration, (iteration, innerIteration -> plainNode1 -> plainNode2))'
  //   const result = parseDSL(dsl)
  //   expect(result).toEqual([
  //     { id: 'outerIteration', node_id: 'outerIteration', title: 'outerIteration', node_type: 'iteration', execution_metadata: {}, status: 'succeeded' },
  //     { id: 'innerIteration', node_id: 'innerIteration', title: 'innerIteration', node_type: 'iteration', execution_metadata: { iteration_id: 'outerIteration', iteration_index: 0 }, status: 'succeeded' },
  //     { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: { iteration_id: 'innerIteration', iteration_index: 0 }, status: 'succeeded' },
  //     { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: { iteration_id: 'innerIteration', iteration_index: 0 }, status: 'succeeded' },
  //   ])
  // })

  it('should handle nested iterations within parallel nodes', () => {
    const dsl = '(parallel, parallelNode, (iteration, iterationNode, plainNode1, plainNode2))'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'parallelNode', node_id: 'parallelNode', title: 'parallelNode', execution_metadata: { parallel_id: 'parallelNode' }, status: 'succeeded' },
      { id: 'iterationNode', node_id: 'iterationNode', title: 'iterationNode', node_type: 'iteration', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'iterationNode' }, status: 'succeeded' },
      { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0, parallel_id: 'parallelNode', parallel_start_node_id: 'iterationNode' }, status: 'succeeded' },
      { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0, parallel_id: 'parallelNode', parallel_start_node_id: 'iterationNode' }, status: 'succeeded' },
    ])
  })

  it('should throw an error for unknown node types', () => {
    const dsl = '(unknown, nodeId)'
    expect(() => parseDSL(dsl)).toThrowError('Unknown nodeType: unknown')
  })
})
@@ -0,0 +1,304 @@
type IterationInfo = { iterationId: string; iterationIndex: number }
type NodePlain = { nodeType: 'plain'; nodeId: string } & Partial<IterationInfo>
type NodeComplex = { nodeType: string; nodeId: string; params: (NodePlain | (NodeComplex & Partial<IterationInfo>) | Node[] | number)[] } & Partial<IterationInfo>
type Node = NodePlain | NodeComplex

/**
 * Parses a DSL string into an array of node objects.
 * @param dsl - The input DSL string.
 * @returns An array of parsed nodes.
 */
function parseDSL(dsl: string): NodeData[] {
  return convertToNodeData(parseTopLevelFlow(dsl).map(nodeStr => parseNode(nodeStr)))
}

/**
 * Splits a top-level flow string by "->", respecting nested structures.
 * @param dsl - The DSL string to split.
 * @returns An array of top-level segments.
 */
function parseTopLevelFlow(dsl: string): string[] {
  const segments: string[] = []
  let buffer = ''
  let nested = 0

  for (let i = 0; i < dsl.length; i++) {
    const char = dsl[i]
    if (char === '(') nested++
    if (char === ')') nested--
    if (char === '-' && dsl[i + 1] === '>' && nested === 0) {
      segments.push(buffer.trim())
      buffer = ''
      i++ // Skip the ">" character
    }
    else {
      buffer += char
    }
  }
  if (buffer.trim())
    segments.push(buffer.trim())

  return segments
}

/**
 * Parses a single node string.
 * If the node is complex (e.g., has parentheses), it extracts the node type, node ID, and parameters.
 * @param nodeStr - The node string to parse.
 * @param parentIterationId - The ID of the parent iteration node (if applicable).
 * @returns A parsed node object.
 */
function parseNode(nodeStr: string, parentIterationId?: string): Node {
  // Check if the node is a complex node
  if (nodeStr.startsWith('(') && nodeStr.endsWith(')')) {
    const innerContent = nodeStr.slice(1, -1).trim() // Remove outer parentheses
    let nested = 0
    let buffer = ''
    const parts: string[] = []

    // Split the inner content by commas, respecting nested parentheses
    for (let i = 0; i < innerContent.length; i++) {
      const char = innerContent[i]
      if (char === '(') nested++
      if (char === ')') nested--

      if (char === ',' && nested === 0) {
        parts.push(buffer.trim())
        buffer = ''
      }
      else {
        buffer += char
      }
    }
    parts.push(buffer.trim())

    // Extract nodeType, nodeId, and params
    const [nodeType, nodeId, ...paramsRaw] = parts
    const params = parseParams(paramsRaw, nodeType === 'iteration' ? nodeId.trim() : parentIterationId)
    const complexNode = {
      nodeType: nodeType.trim(),
      nodeId: nodeId.trim(),
      params,
    }
    if (parentIterationId) {
      (complexNode as any).iterationId = parentIterationId;
      (complexNode as any).iterationIndex = 0 // Fixed as 0
    }
    return complexNode
  }

  // If it's not a complex node, treat it as a plain node
  const plainNode: NodePlain = { nodeType: 'plain', nodeId: nodeStr.trim() }
  if (parentIterationId) {
    plainNode.iterationId = parentIterationId
    plainNode.iterationIndex = 0 // Fixed as 0
  }
  return plainNode
}

/**
 * Parses parameters of a complex node.
 * Supports nested flows and complex sub-nodes.
 * Adds iteration-specific metadata recursively.
 * @param paramParts - The parameters string split by commas.
 * @param iterationId - The ID of the iteration node, if applicable.
 * @returns An array of parsed parameters (plain nodes, nested nodes, or flows).
 */
function parseParams(paramParts: string[], iterationId?: string): (Node | Node[] | number)[] {
  return paramParts.map((part) => {
    if (part.includes('->')) {
      // Parse as a flow and return an array of nodes
      return parseTopLevelFlow(part).map(node => parseNode(node, iterationId))
    }
    else if (part.startsWith('(')) {
      // Parse as a nested complex node
      return parseNode(part, iterationId)
    }
    else if (!Number.isNaN(Number(part.trim()))) {
      // Parse as a numeric parameter
      return Number(part.trim())
    }
    else {
      // Parse as a plain node
      return parseNode(part, iterationId)
    }
  })
}

type NodeData = {
  id: string;
  node_id: string;
  title: string;
  node_type?: string;
  execution_metadata: Record<string, any>;
  status: string;
}

/**
 * Converts a plain node to node data.
 */
function convertPlainNode(node: Node): NodeData[] {
  return [
    {
      id: node.nodeId,
      node_id: node.nodeId,
      title: node.nodeId,
      execution_metadata: {},
      status: 'succeeded',
    },
  ]
}

/**
 * Converts a retry node to node data.
 */
function convertRetryNode(node: Node): NodeData[] {
  const { nodeId, iterationId, iterationIndex, params } = node as NodeComplex
  const retryCount = params ? Number.parseInt(params[0] as unknown as string, 10) : 0
  const result: NodeData[] = [
    {
      id: nodeId,
      node_id: nodeId,
      title: nodeId,
      execution_metadata: {},
      status: 'succeeded',
    },
  ]

  for (let i = 0; i < retryCount; i++) {
    result.push({
      id: nodeId,
      node_id: nodeId,
      title: nodeId,
      execution_metadata: iterationId ? {
        iteration_id: iterationId,
        iteration_index: iterationIndex || 0,
      } : {},
      status: 'retry',
    })
  }

  return result
}

/**
 * Converts an iteration node to node data.
 */
function convertIterationNode(node: Node): NodeData[] {
  const { nodeId, params } = node as NodeComplex
  const result: NodeData[] = [
    {
      id: nodeId,
      node_id: nodeId,
      title: nodeId,
      node_type: 'iteration',
      status: 'succeeded',
      execution_metadata: {},
    },
  ]

  params?.forEach((param: any) => {
    if (Array.isArray(param)) {
      param.forEach((childNode: Node) => {
        const childData = convertToNodeData([childNode])
        childData.forEach((data) => {
          data.execution_metadata = {
            ...data.execution_metadata,
            iteration_id: nodeId,
            iteration_index: 0,
          }
        })
        result.push(...childData)
      })
    }
  })

  return result
}

/**
 * Converts a parallel node to node data.
 */
function convertParallelNode(node: Node, parentParallelId?: string, parentStartNodeId?: string): NodeData[] {
  const { nodeId, params } = node as NodeComplex
  const result: NodeData[] = [
    {
      id: nodeId,
      node_id: nodeId,
      title: nodeId,
      execution_metadata: {
        parallel_id: nodeId,
      },
      status: 'succeeded',
    },
  ]

  params?.forEach((param) => {
    if (Array.isArray(param)) {
      const startNodeId = param[0]?.nodeId
      param.forEach((childNode: Node) => {
        const childData = convertToNodeData([childNode])
        childData.forEach((data) => {
          data.execution_metadata = {
            ...data.execution_metadata,
            parallel_id: nodeId,
            parallel_start_node_id: startNodeId,
            ...(parentParallelId && {
              parent_parallel_id: parentParallelId,
              parent_parallel_start_node_id: parentStartNodeId,
            }),
          }
        })
        result.push(...childData)
      })
    }
    else if (param && typeof param === 'object') {
      const startNodeId = param.nodeId
      const childData = convertToNodeData([param])
      childData.forEach((data) => {
        data.execution_metadata = {
          ...data.execution_metadata,
          parallel_id: nodeId,
          parallel_start_node_id: startNodeId,
          ...(parentParallelId && {
            parent_parallel_id: parentParallelId,
            parent_parallel_start_node_id: parentStartNodeId,
          }),
        }
      })
      result.push(...childData)
    }
  })

  return result
}

/**
 * Main function to convert nodes to node data.
 */
function convertToNodeData(nodes: Node[], parentParallelId?: string, parentStartNodeId?: string): NodeData[] {
  const result: NodeData[] = []

  nodes.forEach((node) => {
    switch (node.nodeType) {
      case 'plain':
        result.push(...convertPlainNode(node))
        break
      case 'retry':
        result.push(...convertRetryNode(node))
        break
      case 'iteration':
        result.push(...convertIterationNode(node))
        break
      case 'parallel':
        result.push(...convertParallelNode(node, parentParallelId, parentStartNodeId))
        break
      default:
        throw new Error(`Unknown nodeType: ${node.nodeType}`)
    }
  })

  return result
}

export default parseDSL
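As a rough summary (my reading of the parser above and its spec, not documentation shipped with this commit), the DSL accepted by parseDSL looks like this:

// flow  := node ( ' -> ' node )*
// node  := plainId | '(' type ', ' id ( ', ' param )* ')'
// type  := 'retry' | 'iteration' | 'parallel'
// param := number | node | flow        // a retry count, a branch, or a sub-flow
//
// Examples (all taken from the spec above):
// 'plainNode1 -> plainNode2'                               two plain nodes in sequence
// '(retry, retryNode, 3)'                                  one succeeded entry plus three retry entries
// '(iteration, iterationNode, plainNode1 -> plainNode2)'   children tagged with iteration_id
// '(parallel, parallelNode, nodeA, nodeB -> nodeC)'        branches tagged with parallel_id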
@@ -0,0 +1,10 @@
import formatToTracingNodeList from '.'
import { simpleIterationData } from './iteration/data'
import { simpleRetryData } from './retry/data'

describe('format api data to tracing panel data', () => {
  test('integration', () => {
    expect(formatToTracingNodeList(simpleIterationData.in.reverse() as any)).toEqual(simpleIterationData.expect)
    expect(formatToTracingNodeList(simpleRetryData.in.reverse() as any)).toEqual(simpleRetryData.expect)
  })
})
web/app/components/workflow/run/utils/format-log/index.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import type { NodeTracing } from '@/types/workflow'
import formatIterationNode from './iteration'
import formatParallelNode from './parallel'
import formatRetryNode from './retry'
import formatAgentNode from './agent'
import { cloneDeep } from 'lodash-es'

const formatToTracingNodeList = (list: NodeTracing[], t: any) => {
  const allItems = cloneDeep([...list]).sort((a, b) => a.index - b.index)
  /*
   * Run the formatters that do not change the list structure first
   * (agent log grouping and retry grouping), because the structural
   * formatters below move nodes into other nodes' details.
   */
  const formattedAgentList = formatAgentNode(allItems)
  const formattedRetryList = formatRetryNode(formattedAgentList) // groups the retries of a single node
  // Iteration and parallel change the structure of the list, and they can contain each other.
  const formattedIterationList = formatIterationNode(formattedRetryList, t)
  const formattedParallelList = formatParallelNode(formattedIterationList, t)

  const result = formattedParallelList
  // console.log(allItems)
  // console.log(result)

  return result
}

export default formatToTracingNodeList
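A minimal sketch of how this entry point is typically wired up; fetchWorkflowRunNodes and the stub translator are placeholders for illustration, not part of this commit.

import formatToTracingNodeList from '@/app/components/workflow/run/utils/format-log'
import type { NodeTracing } from '@/types/workflow'

const t = (key: string) => key // stub; the app passes the i18next translator
declare function fetchWorkflowRunNodes(): Promise<NodeTracing[]> // hypothetical data source

fetchWorkflowRunNodes().then((nodes) => {
  // agent/retry grouping first, then iteration/parallel restructuring
  const panelData = formatToTracingNodeList(nodes, t)
  console.log(panelData)
})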
@@ -0,0 +1,22 @@
import format from '.'
import graphToLogStruct from '../graph-to-log-struct'

describe('iteration', () => {
  const list = graphToLogStruct('start -> (iteration, iterationNode, plainNode1 -> plainNode2)')
  const [startNode, iterationNode, ...iterations] = list
  const result = format(list as any, () => { })
  test('result should have no nodes in iteration node', () => {
    expect((result as any).find((item: any) => !!item.execution_metadata?.iteration_id)).toBeUndefined()
  })
  test('iteration should put nodes in details', () => {
    expect(result as any).toEqual([
      startNode,
      {
        ...iterationNode,
        details: [
          [iterations[0], iterations[1]],
        ],
      },
    ])
  })
})
@@ -0,0 +1,55 @@
import { BlockEnum } from '@/app/components/workflow/types'
import type { NodeTracing } from '@/types/workflow'
import formatParallelNode from '../parallel'

function addChildrenToIterationNode(iterationNode: NodeTracing, childrenNodes: NodeTracing[]): NodeTracing {
  const details: NodeTracing[][] = []
  childrenNodes.forEach((item, index) => {
    if (!item.execution_metadata) return
    const { iteration_index = 0 } = item.execution_metadata
    const runIndex: number = iteration_index || index
    if (!details[runIndex])
      details[runIndex] = []

    details[runIndex].push(item)
  })
  return {
    ...iterationNode,
    details,
  }
}

const format = (list: NodeTracing[], t: any): NodeTracing[] => {
  const iterationNodeIds = list
    .filter(item => item.node_type === BlockEnum.Iteration)
    .map(item => item.node_id)
  const iterationChildrenNodeIds = list
    .filter(item => item.execution_metadata?.iteration_id && iterationNodeIds.includes(item.execution_metadata.iteration_id))
    .map(item => item.node_id)
  // move iteration children nodes to iteration node's details field
  const result = list
    .filter(item => !iterationChildrenNodeIds.includes(item.node_id))
    .map((item) => {
      if (item.node_type === BlockEnum.Iteration) {
        const childrenNodes = list.filter(child => child.execution_metadata?.iteration_id === item.node_id)
        const error = childrenNodes.find(child => child.status === 'failed')
        if (error) {
          item.status = 'failed'
          item.error = error.error
        }
        const addedChildrenList = addChildrenToIterationNode(item, childrenNodes)
        // handle parallel node in iteration node
        if (addedChildrenList.details && addedChildrenList.details.length > 0) {
          addedChildrenList.details = addedChildrenList.details.map((row) => {
            return formatParallelNode(row, t)
          })
        }
        return addedChildrenList
      }

      return item
    })

  return result
}

export default format
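For reference, the shape produced by addChildrenToIterationNode, as I read it: details is a two-dimensional array with one row per iteration run, indexed by execution_metadata.iteration_index.

// iterationNode.details (illustrative shape):
// [
//   [nodeA_run0, nodeB_run0], // iteration_index === 0
//   [nodeA_run1, nodeB_run1], // iteration_index === 1
// ]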
@@ -0,0 +1,39 @@
import { cloneDeep } from 'lodash-es'
import format from '.'
import graphToLogStruct from '../graph-to-log-struct'

describe('parallel', () => {
  const list = graphToLogStruct('(parallel, parallelNode, nodeA, nodeB -> nodeC)')
  const [parallelNode, ...parallelDetail] = list
  const parallelI18n = 'PARALLEL'
  // format mutates the input list, so pass a deep copy
  const result = format(cloneDeep(list) as any, () => parallelI18n)

  test('parallel should put nodes in details', () => {
    expect(result as any).toEqual([
      {
        ...parallelNode,
        parallelDetail: {
          isParallelStartNode: true,
          parallelTitle: `${parallelI18n}-1`,
          children: [
            parallelNode,
            {
              ...parallelDetail[0],
              parallelDetail: {
                branchTitle: `${parallelI18n}-1-A`,
              },
            },
            {
              ...parallelDetail[1],
              parallelDetail: {
                branchTitle: `${parallelI18n}-1-B`,
              },
            },
            parallelDetail[2],
          ],
        },
      },
    ])
  })
})
@@ -0,0 +1,175 @@
import { BlockEnum } from '@/app/components/workflow/types'
import type { NodeTracing } from '@/types/workflow'

function printNodeStructure(node: NodeTracing, depth: number) {
  const indent = ' '.repeat(depth)
  console.log(`${indent}${node.title}`)
  if (node.parallelDetail?.children) {
    node.parallelDetail.children.forEach((child) => {
      printNodeStructure(child, depth + 1)
    })
  }
}

function addTitle({
  list, depth, belongParallelIndexInfo,
}: {
  list: NodeTracing[],
  depth: number,
  belongParallelIndexInfo?: string,
}, t: any) {
  let branchIndex = 0
  const hasMoreThanOneParallel = list.filter(node => node.parallelDetail?.isParallelStartNode).length > 1
  list.forEach((node) => {
    const parallel_id = node.parallel_id ?? node.execution_metadata?.parallel_id ?? null
    const parallel_start_node_id = node.parallel_start_node_id ?? node.execution_metadata?.parallel_start_node_id ?? null

    const isNotInParallel = !parallel_id || node.node_type === BlockEnum.End
    if (isNotInParallel)
      return

    const isParallelStartNode = node.parallelDetail?.isParallelStartNode

    const parallelIndexLetter = (() => {
      if (!isParallelStartNode || !hasMoreThanOneParallel)
        return ''

      const index = 1 + list.filter(node => node.parallelDetail?.isParallelStartNode).findIndex(item => item.node_id === node.node_id)
      return String.fromCharCode(64 + index)
    })()

    const parallelIndexInfo = `${depth}${parallelIndexLetter}`

    if (isParallelStartNode) {
      node.parallelDetail!.isParallelStartNode = true
      node.parallelDetail!.parallelTitle = `${t('workflow.common.parallel')}-${parallelIndexInfo}`
    }

    const isBranchStartNode = parallel_start_node_id === node.node_id
    if (isBranchStartNode) {
      branchIndex++
      const branchLetter = String.fromCharCode(64 + branchIndex)
      if (!node.parallelDetail) {
        node.parallelDetail = {
          branchTitle: '',
        }
      }

      node.parallelDetail!.branchTitle = `${t('workflow.common.branch')}-${belongParallelIndexInfo}-${branchLetter}`
    }

    if (node.parallelDetail?.children && node.parallelDetail.children.length > 0) {
      addTitle({
        list: node.parallelDetail.children,
        depth: depth + 1,
        belongParallelIndexInfo: parallelIndexInfo,
      }, t)
    }
  })
}

// list => group by parallel_id (parallel tree).
const format = (list: NodeTracing[], t: any, isPrint?: boolean): NodeTracing[] => {
  if (isPrint)
    console.log(list)

  const result: NodeTracing[] = [...list]
  const parallelFirstNodeMap: Record<string, string> = {}
  // list to tree by parent_parallel_start_node_id and branch by parallel_start_node_id. Each parallel may have more than one branch.
  result.forEach((node) => {
    const parallel_id = node.parallel_id ?? node.execution_metadata?.parallel_id ?? null
    const parent_parallel_id = node.parent_parallel_id ?? node.execution_metadata?.parent_parallel_id ?? null
    const branchStartNodeId = node.parallel_start_node_id ?? node.execution_metadata?.parallel_start_node_id ?? null
    const parentParallelBranchStartNodeId = node.parent_parallel_start_node_id ?? node.execution_metadata?.parent_parallel_start_node_id ?? null
    const isNotInParallel = !parallel_id || node.node_type === BlockEnum.End
    if (isNotInParallel)
      return

    const isParallelStartNode = !parallelFirstNodeMap[parallel_id]
    if (isParallelStartNode) {
      const selfNode = { ...node, parallelDetail: undefined }
      node.parallelDetail = {
        isParallelStartNode: true,
        children: [selfNode],
      }
      parallelFirstNodeMap[parallel_id] = node.node_id
      const isRootLevel = !parent_parallel_id
      if (isRootLevel)
        return

      const parentParallelStartNode = result.find(item => item.node_id === parentParallelBranchStartNodeId)
      // append to parent parallel start node and after the same branch
      if (parentParallelStartNode) {
        if (!parentParallelStartNode?.parallelDetail) {
          parentParallelStartNode!.parallelDetail = {
            children: [],
          }
        }
        if (parentParallelStartNode!.parallelDetail.children) {
          const sameBranchNodesLastIndex = parentParallelStartNode.parallelDetail.children.findLastIndex((node) => {
            const currStartNodeId = node.parallel_start_node_id ?? node.execution_metadata?.parallel_start_node_id ?? null
            return currStartNodeId === parentParallelBranchStartNodeId
          })
          if (sameBranchNodesLastIndex !== -1)
            parentParallelStartNode!.parallelDetail.children.splice(sameBranchNodesLastIndex + 1, 0, node)
          else
            parentParallelStartNode!.parallelDetail.children.push(node)
        }
      }
      return
    }

    // append to parallel start node and after the same branch
    const parallelStartNode = result.find(item => item.node_id === parallelFirstNodeMap[parallel_id])

    if (parallelStartNode && parallelStartNode.parallelDetail && parallelStartNode!.parallelDetail!.children) {
      const sameBranchNodesLastIndex = parallelStartNode.parallelDetail.children.findLastIndex((node) => {
        const currStartNodeId = node.parallel_start_node_id ?? node.execution_metadata?.parallel_start_node_id ?? null
        return currStartNodeId === branchStartNodeId
      })
      if (sameBranchNodesLastIndex !== -1) {
        parallelStartNode.parallelDetail.children.splice(sameBranchNodesLastIndex + 1, 0, node)
      }
      else { // new branch
        parallelStartNode.parallelDetail.children.push(node)
      }
    }
    // parallelStartNode!.parallelDetail!.children.push(node)
  })

  const filteredInParallelSubNodes = result.filter((node) => {
    const parallel_id = node.parallel_id ?? node.execution_metadata?.parallel_id ?? null
    const isNotInParallel = !parallel_id || node.node_type === BlockEnum.End
    if (isNotInParallel)
      return true

    const parent_parallel_id = node.parent_parallel_id ?? node.execution_metadata?.parent_parallel_id ?? null

    if (parent_parallel_id)
      return false

    const isParallelStartNode = node.parallelDetail?.isParallelStartNode
    if (!isParallelStartNode)
      return false

    return true
  })

  // print node structure for debug
  if (isPrint) {
    filteredInParallelSubNodes.forEach((node) => {
      const now = Date.now()
      console.log(`----- p: ${now} start -----`)
      printNodeStructure(node, 0)
      console.log(`----- p: ${now} end -----`)
    })
  }

  addTitle({
    list: filteredInParallelSubNodes,
    depth: 1,
  }, t)

  return filteredInParallelSubNodes
}
export default format
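The titles produced by addTitle, as far as the code and the parallel spec show, follow this pattern; the literal words come from the i18n keys, so the exact text depends on the translator passed in.

// parallelTitle: `${t('workflow.common.parallel')}-${depth}${letter}`              e.g. "PARALLEL-1", "PARALLEL-2A"
// branchTitle:   `${t('workflow.common.branch')}-${parentIndexInfo}-${branchLetter}`  e.g. "PARALLEL-1-A" in the spec,
//                because the spec's stub translator returns 'PARALLEL' for every key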
@@ -0,0 +1,21 @@
import format from '.'
import graphToLogStruct from '../graph-to-log-struct'

describe('retry', () => {
  // retry retryNode 3 times
  const steps = graphToLogStruct('start -> (retry, retryNode, 3)')
  const [startNode, retryNode, ...retryDetail] = steps
  const result = format(steps as any)
  test('should have no retry status nodes', () => {
    expect(result.find(item => (item as any).status === 'retry')).toBeUndefined()
  })
  test('should put retry nodes in retryDetail', () => {
    expect(result).toEqual([
      startNode,
      {
        ...retryNode,
        retryDetail,
      },
    ])
  })
})
@@ -0,0 +1,35 @@
import type { NodeTracing } from '@/types/workflow'

const format = (list: NodeTracing[]): NodeTracing[] => {
  const retryNodes = list.filter((item) => {
    return item.status === 'retry'
  })

  const retryNodeIds = retryNodes.map(item => item.node_id)
  // move retry nodes to retryDetail
  const result = list.filter((item) => {
    return item.status !== 'retry'
  }).map((item) => {
    const { execution_metadata } = item
    const isInIteration = !!execution_metadata?.iteration_id
    const nodeId = item.node_id
    const isRetryBelongNode = retryNodeIds.includes(nodeId)

    if (isRetryBelongNode) {
      return {
        ...item,
        retryDetail: retryNodes.filter((node) => {
          if (!isInIteration)
            return node.node_id === nodeId

          // retry node in iteration
          return node.node_id === nodeId && node.execution_metadata?.iteration_index === execution_metadata?.iteration_index
        }),
      }
    }
    return item
  })
  return result
}

export default format
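And the shape the retry formatter produces, as exercised by the retry spec above: the retry-status entries disappear from the top level and reappear under retryDetail on the surviving node.

// [
//   startNode,
//   { ...retryNode, status: 'succeeded',
//     retryDetail: [/* the three entries with status: 'retry' */] },
// ]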