|
@@ -17,37 +17,37 @@ const systemVars = [
|
|
|
},
|
|
|
{
|
|
|
label: '对话数量',
|
|
|
- key: 'sys.dialogue_count',
|
|
|
+ key: 'sys.dialogueCount',
|
|
|
type: 'Number',
|
|
|
source: VarsSource.Root,
|
|
|
},
|
|
|
{
|
|
|
label: '会话ID',
|
|
|
- key: 'sys.conversation_id',
|
|
|
+ key: 'sys.conversationId',
|
|
|
type: 'String',
|
|
|
source: VarsSource.Root,
|
|
|
},
|
|
|
{
|
|
|
label: '用户ID',
|
|
|
- key: 'sys.user_id',
|
|
|
+ key: 'sys.userId',
|
|
|
type: 'String',
|
|
|
source: VarsSource.Root,
|
|
|
},
|
|
|
{
|
|
|
label: '应用ID',
|
|
|
- key: 'sys.app_id',
|
|
|
+ key: 'sys.appId',
|
|
|
type: 'String',
|
|
|
source: VarsSource.Root,
|
|
|
},
|
|
|
{
|
|
|
label: '工作流ID',
|
|
|
- key: 'sys.workflow_id',
|
|
|
+ key: 'sys.workflowId',
|
|
|
type: 'String',
|
|
|
source: VarsSource.Root,
|
|
|
},
|
|
|
{
|
|
|
label: '工作流运行ID',
|
|
|
- key: 'sys.workflow_run_id',
|
|
|
+ key: 'sys.workflowRunId',
|
|
|
type: 'String',
|
|
|
source: VarsSource.Root,
|
|
|
},
|
|
@@ -228,7 +228,7 @@ export const handleNodeSubmit = (no) => {
|
|
|
required: v.required,
|
|
|
}
|
|
|
if (v.type === 'String') {
|
|
|
- obj.max_length = Number(v.length)
|
|
|
+ obj.maxLength = Number(v.length)
|
|
|
} else if (v.type === 'Select') {
|
|
|
obj.options = v.options
|
|
|
}
|
|
@@ -249,19 +249,19 @@ export const handleNodeSubmit = (no) => {
|
|
|
no.__modelConfig.paramsConfig.temperature
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isTopP) {
|
|
|
- no.modelConfig.paramConfigs.top_p =
|
|
|
+ no.modelConfig.paramConfigs.topP =
|
|
|
no.__modelConfig.paramsConfig.topP
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isFrequency) {
|
|
|
- no.modelConfig.paramConfigs.frequency_penalty =
|
|
|
+ no.modelConfig.paramConfigs.frequencyPenalty =
|
|
|
no.__modelConfig.paramsConfig.frequency
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isExist) {
|
|
|
- no.modelConfig.paramConfigs.presence_penalty =
|
|
|
+ no.modelConfig.paramConfigs.presencePenalty =
|
|
|
no.__modelConfig.paramsConfig.exist
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isTokens) {
|
|
|
- no.modelConfig.paramConfigs.max_tokens =
|
|
|
+ no.modelConfig.paramConfigs.maxTokens =
|
|
|
no.__modelConfig.paramsConfig.tokens
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.stopSequence?.length > 0) {
|
|
@@ -273,29 +273,29 @@ export const handleNodeSubmit = (no) => {
|
|
|
break
|
|
|
case NodeType.Knowledge:
|
|
|
{
|
|
|
- no.multiple_retrieval_config = {
|
|
|
- reranking_enable: false,
|
|
|
- reranking_mode:
|
|
|
+ no.multipleRetrievalConfig = {
|
|
|
+ rerankingEnable: false,
|
|
|
+ rerankingMode:
|
|
|
no.__recallConfig.indexMethod === 'weight'
|
|
|
? 'weighted_score'
|
|
|
- : 'reranking_model',
|
|
|
- top_k: no.__recallConfig.topK,
|
|
|
- score_threshold:
|
|
|
+ : 'reranking_model',
|
|
|
+ topK: no.__recallConfig.topK,
|
|
|
+ scoreThreshold:
|
|
|
no.__recallConfig.isScore == 1 ? no.__recallConfig.score : null,
|
|
|
}
|
|
|
- if (no.multiple_retrieval_config.reranking_mode === 'weighted_score') {
|
|
|
- no.multiple_retrieval_config.weights = {
|
|
|
- vector_setting: {
|
|
|
- vector_weight: no.__recallConfig.weight,
|
|
|
+ if (no.multipleRetrievalConfig.rerankingMode === 'weighted_score') {
|
|
|
+ no.multipleRetrievalConfig.weights = {
|
|
|
+ vectorSetting: {
|
|
|
+ vectorWeight: no.__recallConfig.weight,
|
|
|
// embedding_provider_name: 'langgenius/ollama/ollama',
|
|
|
// embedding_model_name: 'bge-m3:567m',
|
|
|
},
|
|
|
- keyword_setting: {
|
|
|
- keyword_weight: 1 - no.__recallConfig.weight,
|
|
|
+ keywordSetting: {
|
|
|
+ keywordWeight: 1 - no.__recallConfig.weight,
|
|
|
},
|
|
|
}
|
|
|
} else {
|
|
|
- no.multiple_retrieval_config.reranking_model = {
|
|
|
+ no.multipleRetrievalConfig.rerankingModel = {
|
|
|
pluginInstanceId: no.__recallConfig.rerank,
|
|
|
}
|
|
|
}
|
|
@@ -308,21 +308,21 @@ export const handleNodeSubmit = (no) => {
|
|
|
.map((v) => {
|
|
|
const obj = {
|
|
|
id: v.id,
|
|
|
- case_id: v.id,
|
|
|
- logical_operator: v.mode,
|
|
|
+ caseId: v.id,
|
|
|
+ logicalOperator: v.mode,
|
|
|
conditions:
|
|
|
v.cases?.map((c) => {
|
|
|
const cObj: any = {
|
|
|
id: c.id,
|
|
|
varType: c.source.type.toLowerCase(),
|
|
|
- variable_selector:
|
|
|
+ variableSelector:
|
|
|
c.source.source === VarsSource.Root
|
|
|
? [
|
|
|
c.source.key.split('.')[0],
|
|
|
c.source.key.split('.')[1],
|
|
|
]
|
|
|
: [c.source.nodeId, c.source.key],
|
|
|
- comparison_operator: c.method,
|
|
|
+ comparisonOperator: c.method,
|
|
|
value:
|
|
|
c.type === ConditionType.Constant
|
|
|
? c.target
|
|
@@ -353,19 +353,19 @@ export const handleNodeSubmit = (no) => {
|
|
|
no.__modelConfig.paramsConfig.temperature
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isTopP) {
|
|
|
- no.modelConfig.paramConfigs.top_p =
|
|
|
+ no.modelConfig.paramConfigs.topP =
|
|
|
no.__modelConfig.paramsConfig.topP
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isFrequency) {
|
|
|
- no.modelConfig.paramConfigs.frequency_penalty =
|
|
|
+ no.modelConfig.paramConfigs.frequencyPenalty =
|
|
|
no.__modelConfig.paramsConfig.frequency
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isExist) {
|
|
|
- no.modelConfig.paramConfigs.presence_penalty =
|
|
|
+ no.modelConfig.paramConfigs.presencePenalty =
|
|
|
no.__modelConfig.paramsConfig.exist
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.isTokens) {
|
|
|
- no.modelConfig.paramConfigs.max_tokens =
|
|
|
+ no.modelConfig.paramConfigs.maxTokens =
|
|
|
no.__modelConfig.paramsConfig.tokens
|
|
|
}
|
|
|
if (no.__modelConfig.paramsConfig.stopSequence?.length > 0) {
|