feat: llm support jinja fe (#4260)

Author: Joel
Date: 2024-05-10 18:14:05 +08:00
Committed by: GitHub
Parent: 6b99075dc8
Commit: 01555463d2
21 changed files with 621 additions and 177 deletions

View File

@@ -3,7 +3,8 @@ import type { FC } from 'react'
import React, { useEffect, useState } from 'react'
import { uniqueId } from 'lodash-es'
import { useTranslation } from 'react-i18next'
import type { PromptItem } from '../../../types'
import type { PromptItem, Variable } from '../../../types'
import { EditionType } from '../../../types'
import Editor from '@/app/components/workflow/nodes/_base/components/prompt/editor'
import TypeSelector from '@/app/components/workflow/nodes/_base/components/selector'
import TooltipPlus from '@/app/components/base/tooltip-plus'
@@ -24,6 +25,7 @@ type Props = {
payload: PromptItem
handleChatModeMessageRoleChange: (role: PromptRole) => void
onPromptChange: (p: string) => void
onEditionTypeChange: (editionType: EditionType) => void
onRemove: () => void
isShowContext: boolean
hasSetBlockStatus: {
@@ -33,6 +35,8 @@ type Props = {
}
availableVars: any
availableNodes: any
varList: Variable[]
handleAddVariable: (payload: any) => void
}
const roleOptions = [
@@ -64,17 +68,21 @@ const ConfigPromptItem: FC<Props> = ({
isChatApp,
payload,
onPromptChange,
onEditionTypeChange,
onRemove,
isShowContext,
hasSetBlockStatus,
availableVars,
availableNodes,
varList,
handleAddVariable,
}) => {
const { t } = useTranslation()
const [instanceId, setInstanceId] = useState(uniqueId())
useEffect(() => {
setInstanceId(`${id}-${uniqueId()}`)
}, [id])
return (
<Editor
className={className}
@@ -107,7 +115,7 @@ const ConfigPromptItem: FC<Props> = ({
</TooltipPlus>
</div>
}
value={payload.text}
value={payload.edition_type === EditionType.jinja2 ? (payload.jinja2_text || '') : payload.text}
onChange={onPromptChange}
readOnly={readOnly}
showRemove={canRemove}
@@ -118,6 +126,11 @@ const ConfigPromptItem: FC<Props> = ({
hasSetBlockStatus={hasSetBlockStatus}
nodesOutputVars={availableVars}
availableNodes={availableNodes}
isSupportJinja
editionType={payload.edition_type}
onEditionTypeChange={onEditionTypeChange}
varList={varList}
handleAddVariable={handleAddVariable}
/>
)
}
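
The core change in this component is how the editor value is resolved: a prompt message now carries both a plain text field and an optional jinja2_text, and edition_type decides which one the editor shows and edits. A minimal sketch of that resolution with simplified local types (the enum string values and the getEditorValue helper are assumptions, not part of the diff):

enum EditionType {
  basic = 'basic',
  jinja2 = 'jinja2',
}

type PromptItem = {
  text: string
  jinja2_text?: string
  edition_type?: EditionType
}

// Jinja2 messages are shown and edited through jinja2_text; basic messages keep using text.
const getEditorValue = (payload: PromptItem): string =>
  payload.edition_type === EditionType.jinja2
    ? (payload.jinja2_text || '')
    : payload.text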

View File

@@ -6,8 +6,8 @@ import produce from 'immer'
import { ReactSortable } from 'react-sortablejs'
import { v4 as uuid4 } from 'uuid'
import cn from 'classnames'
import type { PromptItem, ValueSelector, Var } from '../../../types'
import { PromptRole } from '../../../types'
import type { PromptItem, ValueSelector, Var, Variable } from '../../../types'
import { EditionType, PromptRole } from '../../../types'
import useAvailableVarList from '../../_base/hooks/use-available-var-list'
import ConfigPromptItem from './config-prompt-item'
import Editor from '@/app/components/workflow/nodes/_base/components/prompt/editor'
@@ -30,6 +30,8 @@ type Props = {
history: boolean
query: boolean
}
varList?: Variable[]
handleAddVariable: (payload: any) => void
}
const ConfigPrompt: FC<Props> = ({
@@ -42,10 +44,12 @@ const ConfigPrompt: FC<Props> = ({
onChange,
isShowContext,
hasSetBlockStatus,
varList = [],
handleAddVariable,
}) => {
const { t } = useTranslation()
const payloadWithIds = (isChatModel && Array.isArray(payload))
? payload.map((item, i) => {
? payload.map((item) => {
const id = uuid4()
return {
id: item.id || id,
@@ -67,7 +71,16 @@ const ConfigPrompt: FC<Props> = ({
const handleChatModePromptChange = useCallback((index: number) => {
return (prompt: string) => {
const newPrompt = produce(payload as PromptItem[], (draft) => {
draft[index].text = prompt
draft[index][draft[index].edition_type === EditionType.jinja2 ? 'jinja2_text' : 'text'] = prompt
})
onChange(newPrompt)
}
}, [onChange, payload])
const handleChatModeEditionTypeChange = useCallback((index: number) => {
return (editionType: EditionType) => {
const newPrompt = produce(payload as PromptItem[], (draft) => {
draft[index].edition_type = editionType
})
onChange(newPrompt)
}
@@ -106,7 +119,14 @@ const ConfigPrompt: FC<Props> = ({
const handleCompletionPromptChange = useCallback((prompt: string) => {
const newPrompt = produce(payload as PromptItem, (draft) => {
draft.text = prompt
draft[draft.edition_type === EditionType.jinja2 ? 'jinja2_text' : 'text'] = prompt
})
onChange(newPrompt)
}, [onChange, payload])
const handleCompletionEditionTypeChange = useCallback((editionType: EditionType) => {
const newPrompt = produce(payload as PromptItem, (draft) => {
draft.edition_type = editionType
})
onChange(newPrompt)
}, [onChange, payload])
@@ -161,11 +181,14 @@ const ConfigPrompt: FC<Props> = ({
isChatApp={isChatApp}
payload={item}
onPromptChange={handleChatModePromptChange(index)}
onEditionTypeChange={handleChatModeEditionTypeChange(index)}
onRemove={handleRemove(index)}
isShowContext={isShowContext}
hasSetBlockStatus={hasSetBlockStatus}
availableVars={availableVars}
availableNodes={availableNodes}
varList={varList}
handleAddVariable={handleAddVariable}
/>
</div>
@@ -187,7 +210,7 @@ const ConfigPrompt: FC<Props> = ({
<Editor
instanceId={`${nodeId}-chat-workflow-llm-prompt-editor`}
title={<span className='capitalize'>{t(`${i18nPrefix}.prompt`)}</span>}
value={(payload as PromptItem).text}
value={(payload as PromptItem).edition_type === EditionType.basic ? (payload as PromptItem).text : ((payload as PromptItem).jinja2_text || '')}
onChange={handleCompletionPromptChange}
readOnly={readOnly}
isChatModel={isChatModel}
@@ -196,6 +219,11 @@ const ConfigPrompt: FC<Props> = ({
hasSetBlockStatus={hasSetBlockStatus}
nodesOutputVars={availableVars}
availableNodes={availableNodes}
isSupportJinja
editionType={(payload as PromptItem).edition_type}
varList={varList}
onEditionTypeChange={handleCompletionEditionTypeChange}
handleAddVariable={handleAddVariable}
/>
</div>
)}
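
Both the chat-mode and completion handlers above follow the same routing rule: the edited string is written to jinja2_text when the message is in Jinja2 mode and to text otherwise, so toggling edition types never overwrites the other draft. A hedged sketch of that rule with immer and simplified types (updatePromptAt is a hypothetical name):

import produce from 'immer'

enum EditionType {
  basic = 'basic',
  jinja2 = 'jinja2',
}

type PromptItem = {
  text: string
  jinja2_text?: string
  edition_type?: EditionType
}

// Write the edited prompt into the field that matches the message's current mode.
const updatePromptAt = (items: PromptItem[], index: number, prompt: string): PromptItem[] =>
  produce(items, (draft) => {
    if (draft[index].edition_type === EditionType.jinja2)
      draft[index].jinja2_text = prompt
    else
      draft[index].text = prompt
  })

Reads go through the same switch, which is why the completion editor above falls back to jinja2_text whenever edition_type is not basic.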

View File

@@ -1,7 +1,6 @@
import { BlockEnum } from '../../types'
import { type NodeDefault, PromptRole } from '../../types'
import { BlockEnum, EditionType } from '../../types'
import { type NodeDefault, type PromptItem, PromptRole } from '../../types'
import type { LLMNodeType } from './types'
import type { PromptItem } from '@/models/debug'
import { ALL_CHAT_AVAILABLE_BLOCKS, ALL_COMPLETION_AVAILABLE_BLOCKS } from '@/app/components/workflow/constants'
const i18nPrefix = 'workflow.errorMsg'
@@ -16,7 +15,6 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
temperature: 0.7,
},
},
variables: [],
prompt_template: [{
role: PromptRole.system,
text: '',
@@ -57,6 +55,23 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
if (isChatModel && !!payload.memory.query_prompt_template && !payload.memory.query_prompt_template.includes('{{#sys.query#}}'))
errorMessages = t('workflow.nodes.llm.sysQueryInUser')
}
if (!errorMessages) {
const isChatModel = payload.model.mode === 'chat'
const isShowVars = (() => {
if (isChatModel)
return (payload.prompt_template as PromptItem[]).some(item => item.edition_type === EditionType.jinja2)
return (payload.prompt_template as PromptItem).edition_type === EditionType.jinja2
})()
if (isShowVars && payload.prompt_config?.jinja2_variables) {
payload.prompt_config?.jinja2_variables.forEach((i) => {
if (!errorMessages && !i.variable)
errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t(`${i18nPrefix}.fields.variable`) })
if (!errorMessages && !i.value_selector.length)
errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t(`${i18nPrefix}.fields.variableValue`) })
})
}
}
return {
isValid: !errorMessages,
errorMessage: errorMessages,
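
The new checkValid branch only applies when some prompt is in Jinja2 mode, and it then requires every declared Jinja2 variable to have a name and a non-empty value selector. A simplified sketch of that rule without i18n (validateJinja2Variables is hypothetical, the plain strings stand in for the translated fieldRequired messages, and value_selector is assumed to be an array of path segments):

type Variable = {
  variable: string
  value_selector: string[]
}

const validateJinja2Variables = (vars: Variable[]): string | undefined => {
  for (const v of vars) {
    if (!v.variable)
      return 'variable name is required'
    if (!v.value_selector.length)
      return 'variable value is required'
  }
  return undefined // no error
}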

View File

@@ -7,6 +7,8 @@ import useConfig from './use-config'
import ResolutionPicker from './components/resolution-picker'
import type { LLMNodeType } from './types'
import ConfigPrompt from './components/config-prompt'
import VarList from '@/app/components/workflow/nodes/_base/components/variable/var-list'
import AddButton2 from '@/app/components/base/button/add-button'
import Field from '@/app/components/workflow/nodes/_base/components/field'
import Split from '@/app/components/workflow/nodes/_base/components/split'
import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal'
@@ -44,7 +46,12 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
filterVar,
availableVars,
availableNodes,
isShowVars,
handlePromptChange,
handleAddEmptyVariable,
handleAddVariable,
handleVarListChange,
handleVarNameChange,
handleSyeQueryChange,
handleMemoryChange,
handleVisionResolutionEnabledChange,
@@ -169,9 +176,29 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
payload={inputs.prompt_template}
onChange={handlePromptChange}
hasSetBlockStatus={hasSetBlockStatus}
varList={inputs.prompt_config?.jinja2_variables || []}
handleAddVariable={handleAddVariable}
/>
)}
{isShowVars && (
<Field
title={t('workflow.nodes.templateTransform.inputVars')}
operations={
!readOnly ? <AddButton2 onClick={handleAddEmptyVariable} /> : undefined
}
>
<VarList
nodeId={id}
readonly={readOnly}
list={inputs.prompt_config?.jinja2_variables || []}
onChange={handleVarListChange}
onVarNameChange={handleVarNameChange}
filterVar={filterVar}
/>
</Field>
)}
{/* Memory put place examples. */}
{isChatMode && isChatModel && !!inputs.memory && (
<div className='mt-4'>
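
The Jinja2 variable list above is only rendered when isShowVars is true, and the same predicate gates the node validation shown earlier: at least one chat message, or the single completion prompt, must be in Jinja2 mode. A sketch of that check with simplified types (getIsShowVars is a hypothetical standalone version of the inline IIFE):

enum EditionType {
  basic = 'basic',
  jinja2 = 'jinja2',
}

type PromptItem = {
  text: string
  jinja2_text?: string
  edition_type?: EditionType
}

// Chat models carry an array of messages; completion models carry a single prompt.
const getIsShowVars = (promptTemplate: PromptItem[] | PromptItem, isChatModel: boolean): boolean => {
  if (isChatModel)
    return (promptTemplate as PromptItem[]).some(item => item.edition_type === EditionType.jinja2)
  return (promptTemplate as PromptItem).edition_type === EditionType.jinja2
}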

View File

@@ -3,8 +3,10 @@ import type { CommonNodeType, Memory, ModelConfig, PromptItem, ValueSelector, Va
export type LLMNodeType = CommonNodeType & {
model: ModelConfig
variables: Variable[]
prompt_template: PromptItem[] | PromptItem
prompt_config?: {
jinja2_variables?: Variable[]
}
memory?: Memory
context: {
enabled: boolean
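
With the new optional prompt_config block on LLMNodeType, a Jinja2-enabled node stores its template and the variables the template references side by side. An illustrative payload sketch (field names come from the diff, all values are invented for the example):

const exampleLLMNode = {
  prompt_template: [{
    role: 'user',
    text: '',
    edition_type: 'jinja2',
    jinja2_text: 'Summarize the following article:\n{{ article }}',
  }],
  prompt_config: {
    jinja2_variables: [
      // value_selector is assumed to point at an upstream node's output
      { variable: 'article', value_selector: ['start', 'article'] },
    ],
  },
}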

View File

@@ -1,8 +1,7 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import produce from 'immer'
import useVarList from '../_base/hooks/use-var-list'
import { VarType } from '../../types'
import type { Memory, ValueSelector, Var } from '../../types'
import { EditionType, VarType } from '../../types'
import type { Memory, PromptItem, ValueSelector, Var, Variable } from '../../types'
import { useStore } from '../../store'
import {
useIsChatMode,
@@ -18,7 +17,6 @@ import {
} from '@/app/components/header/account-setting/model-provider-page/declarations'
import useNodeCrud from '@/app/components/workflow/nodes/_base/hooks/use-node-crud'
import useOneStepRun from '@/app/components/workflow/nodes/_base/hooks/use-one-step-run'
import type { PromptItem } from '@/models/debug'
import { RETRIEVAL_OUTPUT_STRUCT } from '@/app/components/workflow/constants'
import { checkHasContextBlock, checkHasHistoryBlock, checkHasQueryBlock } from '@/app/components/base/prompt-editor/constants'
@@ -29,20 +27,21 @@ const useConfig = (id: string, payload: LLMNodeType) => {
const defaultConfig = useStore(s => s.nodesDefaultConfigs)[payload.type]
const [defaultRolePrefix, setDefaultRolePrefix] = useState<{ user: string; assistant: string }>({ user: '', assistant: '' })
const { inputs, setInputs: doSetInputs } = useNodeCrud<LLMNodeType>(id, payload)
const inputRef = useRef(inputs)
const setInputs = useCallback((newInputs: LLMNodeType) => {
if (newInputs.memory && !newInputs.memory.role_prefix) {
const newPayload = produce(newInputs, (draft) => {
draft.memory!.role_prefix = defaultRolePrefix
})
doSetInputs(newPayload)
inputRef.current = newPayload
return
}
doSetInputs(newInputs)
inputRef.current = newInputs
}, [doSetInputs, defaultRolePrefix])
const inputRef = useRef(inputs)
useEffect(() => {
inputRef.current = inputs
}, [inputs])
// model
const model = inputs.model
const modelMode = inputs.model?.mode
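
The inputRef above is kept in sync both inside the setInputs wrapper and in a useEffect, so the variable handlers added further down can read the latest node data through inputRef.current with stable dependency arrays instead of depending on inputs directly. A generic sketch of the pattern (useLatestInputs is a hypothetical name, not part of the diff):

import { useCallback, useEffect, useRef, useState } from 'react'

function useLatestInputs<T>(initial: T) {
  const [inputs, doSetInputs] = useState(initial)
  const inputRef = useRef(inputs)

  // Keep the ref in sync when state changes through any other path.
  useEffect(() => {
    inputRef.current = inputs
  }, [inputs])

  // Also update the ref synchronously on every explicit set, so a callback that
  // fires before the next render still reads fresh data.
  const setInputs = useCallback((next: T) => {
    inputRef.current = next
    doSetInputs(next)
  }, [])

  return { inputs, inputRef, setInputs }
}

This is also why handlePromptChange below switches from produce(inputs, ...) to produce(inputRef.current, ...) and its dependency list shrinks to [setInputs].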
@@ -178,11 +177,80 @@ const useConfig = (id: string, payload: LLMNodeType) => {
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isShowVisionConfig, modelChanged])
// variables
const { handleVarListChange, handleAddVariable } = useVarList<LLMNodeType>({
inputs,
setInputs,
})
const isShowVars = (() => {
if (isChatModel)
return (inputs.prompt_template as PromptItem[]).some(item => item.edition_type === EditionType.jinja2)
return (inputs.prompt_template as PromptItem).edition_type === EditionType.jinja2
})()
const handleAddEmptyVariable = useCallback(() => {
const newInputs = produce(inputRef.current, (draft) => {
if (!draft.prompt_config) {
draft.prompt_config = {
jinja2_variables: [],
}
}
if (!draft.prompt_config.jinja2_variables)
draft.prompt_config.jinja2_variables = []
draft.prompt_config.jinja2_variables.push({
variable: '',
value_selector: [],
})
})
setInputs(newInputs)
}, [setInputs])
const handleAddVariable = useCallback((payload: Variable) => {
const newInputs = produce(inputRef.current, (draft) => {
if (!draft.prompt_config) {
draft.prompt_config = {
jinja2_variables: [],
}
}
if (!draft.prompt_config.jinja2_variables)
draft.prompt_config.jinja2_variables = []
draft.prompt_config.jinja2_variables.push(payload)
})
setInputs(newInputs)
}, [setInputs])
const handleVarListChange = useCallback((newList: Variable[]) => {
const newInputs = produce(inputRef.current, (draft) => {
if (!draft.prompt_config) {
draft.prompt_config = {
jinja2_variables: [],
}
}
if (!draft.prompt_config.jinja2_variables)
draft.prompt_config.jinja2_variables = []
draft.prompt_config.jinja2_variables = newList
})
setInputs(newInputs)
}, [setInputs])
const handleVarNameChange = useCallback((oldName: string, newName: string) => {
const newInputs = produce(inputRef.current, (draft) => {
if (isChatModel) {
const promptTemplate = draft.prompt_template as PromptItem[]
promptTemplate.filter(item => item.edition_type === EditionType.jinja2).forEach((item) => {
item.jinja2_text = (item.jinja2_text || '').replaceAll(`{{ ${oldName} }}`, `{{ ${newName} }}`)
})
}
else {
if ((draft.prompt_template as PromptItem).edition_type !== EditionType.jinja2)
return
const promptTemplate = draft.prompt_template as PromptItem
promptTemplate.jinja2_text = (promptTemplate.jinja2_text || '').replaceAll(`{{ ${oldName} }}`, `{{ ${newName} }}`)
}
})
setInputs(newInputs)
}, [isChatModel, setInputs])
// context
const handleContextVarChange = useCallback((newVar: ValueSelector | string) => {
@@ -194,11 +262,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
}, [inputs, setInputs])
const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
const newInputs = produce(inputs, (draft) => {
const newInputs = produce(inputRef.current, (draft) => {
draft.prompt_template = newPrompt
})
setInputs(newInputs)
}, [inputs, setInputs])
}, [setInputs])
const handleMemoryChange = useCallback((newMemory?: Memory) => {
const newInputs = produce(inputs, (draft) => {
@@ -286,6 +354,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
runInputData,
setRunInputData,
runResult,
toVarInputs,
} = useOneStepRun<LLMNodeType>({
id,
data: inputs,
@@ -295,23 +364,6 @@ const useConfig = (id: string, payload: LLMNodeType) => {
},
})
// const handleRun = (submitData: Record<string, any>) => {
// console.log(submitData)
// const res = produce(submitData, (draft) => {
// debugger
// if (draft.contexts) {
// draft['#context#'] = draft.contexts
// delete draft.contexts
// }
// if (draft.visionFiles) {
// draft['#files#'] = draft.visionFiles
// delete draft.visionFiles
// }
// })
// doHandleRun(res)
// }
const inputVarValues = (() => {
const vars: Record<string, any> = {}
Object.keys(runInputData)
@@ -348,7 +400,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
}, [runInputData, setRunInputData])
const allVarStrArr = (() => {
const arr = isChatModel ? (inputs.prompt_template as PromptItem[]).map(item => item.text) : [(inputs.prompt_template as PromptItem).text]
const arr = isChatModel ? (inputs.prompt_template as PromptItem[]).filter(item => item.edition_type !== EditionType.jinja2).map(item => item.text) : [(inputs.prompt_template as PromptItem).text]
if (isChatMode && isChatModel && !!inputs.memory) {
arr.push('{{#sys.query#}}')
arr.push(inputs.memory.query_prompt_template)
@@ -357,7 +409,13 @@ const useConfig = (id: string, payload: LLMNodeType) => {
return arr
})()
const varInputs = getInputVars(allVarStrArr)
const varInputs = (() => {
const vars = getInputVars(allVarStrArr)
if (isShowVars)
return [...vars, ...toVarInputs(inputs.prompt_config?.jinja2_variables || [])]
return vars
})()
return {
readOnly,
@@ -370,8 +428,11 @@ const useConfig = (id: string, payload: LLMNodeType) => {
isShowVisionConfig,
handleModelChanged,
handleCompletionParamsChange,
isShowVars,
handleVarListChange,
handleVarNameChange,
handleAddVariable,
handleAddEmptyVariable,
handleContextVarChange,
filterInputVar,
filterVar,
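
One more piece worth calling out from the hook above: handleVarNameChange keeps prompts and the variable list consistent by rewriting every {{ old }} placeholder in the affected Jinja2 prompt texts when a variable is renamed. A standalone sketch of the chat-mode branch with simplified types (renameJinja2Var is not part of the diff):

enum EditionType {
  basic = 'basic',
  jinja2 = 'jinja2',
}

type PromptItem = {
  text: string
  jinja2_text?: string
  edition_type?: EditionType
}

// Rewrite the placeholder only in messages that are actually in Jinja2 mode.
const renameJinja2Var = (items: PromptItem[], oldName: string, newName: string): PromptItem[] =>
  items.map(item =>
    item.edition_type === EditionType.jinja2
      ? { ...item, jinja2_text: (item.jinja2_text || '').replaceAll(`{{ ${oldName} }}`, `{{ ${newName} }}`) }
      : item,
  )

As in the diff, only placeholders written exactly as {{ name }} (single spaces, no filters) are rewritten; any other spelling is left untouched.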