Skip to content

Commit 563516f

Browse files
committed
feat: AI chat node mcp server config
1 parent 2d6ac80 commit 563516f

File tree

4 files changed

+193
-1
lines changed

4 files changed

+193
-1
lines changed

apps/application/flow/step_node/ai_chat_step_node/i_chat_node.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,9 @@ class ChatNodeSerializer(serializers.Serializer):
3333
error_messages=ErrMessage.dict('Model settings'))
3434
dialogue_type = serializers.CharField(required=False, allow_blank=True, allow_null=True,
3535
error_messages=ErrMessage.char(_("Context Type")))
36+
mcp_enable = serializers.BooleanField(required=False,
37+
error_messages=ErrMessage.boolean(_("Whether to enable MCP")))
38+
mcp_servers = serializers.JSONField(required=False, error_messages=ErrMessage.list(_("MCP Server")))
3639

3740

3841
class IChatNode(INode):
@@ -49,5 +52,7 @@ def execute(self, model_id, system, prompt, dialogue_number, history_chat_record
4952
model_params_setting=None,
5053
dialogue_type=None,
5154
model_setting=None,
55+
mcp_enable=False,
56+
mcp_servers=None,
5257
**kwargs) -> NodeResult:
5358
pass

apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py

Lines changed: 58 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,19 @@
66
@date:2024/6/4 14:30
77
@desc:
88
"""
9+
import asyncio
10+
import json
911
import re
1012
import time
1113
from functools import reduce
14+
from types import AsyncGeneratorType
1215
from typing import List, Dict
1316

1417
from django.db.models import QuerySet
1518
from langchain.schema import HumanMessage, SystemMessage
16-
from langchain_core.messages import BaseMessage, AIMessage
19+
from langchain_core.messages import BaseMessage, AIMessage, AIMessageChunk, ToolMessage
20+
from langchain_mcp_adapters.client import MultiServerMCPClient
21+
from langgraph.prebuilt import create_react_agent
1722

1823
from application.flow.i_step_node import NodeResult, INode
1924
from application.flow.step_node.ai_chat_step_node.i_chat_node import IChatNode
@@ -56,6 +61,7 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
5661
reasoning = Reasoning(model_setting.get('reasoning_content_start', '<think>'),
5762
model_setting.get('reasoning_content_end', '</think>'))
5863
response_reasoning_content = False
64+
5965
for chunk in response:
6066
reasoning_chunk = reasoning.get_reasoning_content(chunk)
6167
content_chunk = reasoning_chunk.get('content')
@@ -84,6 +90,47 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
8490
_write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)
8591

8692

93+
94+
async def _yield_mcp_response(chat_model, message_list, mcp_servers):
    """Stream AI message chunks from a react agent backed by MCP tools.

    Args:
        chat_model: LangChain chat model driving the agent.
        message_list: Conversation messages passed to the agent.
        mcp_servers: JSON string describing the MCP server configuration
            (parsed and handed to ``MultiServerMCPClient``).

    Yields:
        AIMessageChunk: model output chunks, in arrival order. Tool
        messages produced during the agent run are intentionally dropped.
    """
    async with MultiServerMCPClient(json.loads(mcp_servers)) as client:
        agent = create_react_agent(chat_model, client.get_tools())
        response = agent.astream({"messages": message_list}, stream_mode='messages')
        async for chunk in response:
            # chunk is a (message, metadata) pair in 'messages' stream mode;
            # only surface model output, skip ToolMessage entries.
            if isinstance(chunk[0], AIMessageChunk):
                yield chunk[0]
103+
104+
def mcp_response_generator(chat_model, message_list, mcp_servers):
    """Bridge the async MCP chunk stream into a synchronous generator.

    Spins up a private event loop and pulls chunks from
    ``_yield_mcp_response`` one at a time, so synchronous workflow code
    can iterate the stream like any other generator.

    Args:
        chat_model: LangChain chat model driving the agent.
        message_list: Conversation messages passed to the agent.
        mcp_servers: JSON string describing the MCP server configuration.

    Yields:
        AIMessageChunk: streamed model output chunks.
    """
    loop = asyncio.new_event_loop()
    async_gen = _yield_mcp_response(chat_model, message_list, mcp_servers)
    try:
        while True:
            try:
                yield loop.run_until_complete(anext_async(async_gen))
            except StopAsyncIteration:
                break
    except Exception as e:
        # Best-effort: report and end the stream instead of crashing the
        # surrounding workflow execution.
        print(f'exception: {e}')
    finally:
        # Close the async generator while the loop is still alive so the
        # MCP client context manager exits cleanly (otherwise cleanup is
        # skipped and "coroutine was never awaited" warnings are emitted).
        try:
            loop.run_until_complete(async_gen.aclose())
        except Exception:
            pass
        loop.close()
119+
async def anext_async(async_iterator):
    """Await and return the next item produced by *async_iterator*.

    Raises:
        StopAsyncIteration: when the iterator is exhausted.
    """
    return await async_iterator.__anext__()
121+
122+
async def _get_mcp_response(chat_model, message_list, mcp_servers):
    """Collect the complete (non-streamed) MCP agent response.

    Args:
        chat_model: LangChain chat model driving the agent.
        message_list: Conversation messages passed to the agent.
        mcp_servers: JSON string describing the MCP server configuration.

    Returns:
        list[AIMessageChunk]: every model output chunk, in arrival order.
        Tool messages produced during the agent run are dropped.
    """
    async with MultiServerMCPClient(json.loads(mcp_servers)) as client:
        agent = create_react_agent(chat_model, client.get_tools())
        response = agent.astream({"messages": message_list}, stream_mode='messages')
        # chunk is a (message, metadata) pair in 'messages' stream mode;
        # keep only model output, skip ToolMessage entries.
        return [chunk[0] async for chunk in response if isinstance(chunk[0], AIMessageChunk)]
133+
87134
def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow):
88135
"""
89136
写入上下文数据
@@ -142,6 +189,8 @@ def execute(self, model_id, system, prompt, dialogue_number, history_chat_record
142189
model_params_setting=None,
143190
dialogue_type=None,
144191
model_setting=None,
192+
mcp_enable=False,
193+
mcp_servers=None,
145194
**kwargs) -> NodeResult:
146195
if dialogue_type is None:
147196
dialogue_type = 'WORKFLOW'
@@ -163,6 +212,14 @@ def execute(self, model_id, system, prompt, dialogue_number, history_chat_record
163212
self.context['system'] = system
164213
message_list = self.generate_message_list(system, prompt, history_message)
165214
self.context['message_list'] = message_list
215+
216+
if mcp_enable and mcp_servers is not None:
217+
r = mcp_response_generator(chat_model, message_list, mcp_servers)
218+
return NodeResult(
219+
{'result': r, 'chat_model': chat_model, 'message_list': message_list,
220+
'history_message': history_message, 'question': question.content}, {},
221+
_write_context=write_context_stream)
222+
166223
if stream:
167224
r = chat_model.stream(message_list)
168225
return NodeResult({'result': r, 'chat_model': chat_model, 'message_list': message_list,
Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
<template>
  <el-dialog
    align-center
    :title="$t('common.setting')"
    class="param-dialog"
    v-model="dialogVisible"
    style="width: 550px"
    append-to-body
    :close-on-click-modal="false"
    :close-on-press-escape="false"
  >
    <el-form label-position="top" ref="paramFormRef" :model="form"
      require-asterisk-position="right">
      <el-form-item label="MCP" prop="mcp_enable">
        <el-switch v-model="form.mcp_enable" />
      </el-form-item>
      <!-- Server config is only mandatory when MCP is switched on;
           otherwise the dialog could never be saved with an empty config. -->
      <el-form-item label="MCP Server Config" prop="mcp_servers"
        :rules="[{ required: form.mcp_enable, message: $t('common.required') }]">
        <el-input
          v-model="form.mcp_servers"
          :rows="6"
          type="textarea"
        />
      </el-form-item>
    </el-form>

    <template #footer>
      <span class="dialog-footer p-16">
        <el-button @click.prevent="dialogVisible = false">{{ $t('common.cancel') }}</el-button>
        <el-button type="primary" @click="submit()" :loading="loading">
          {{ $t('common.save') }}
        </el-button>
      </span>
    </template>
  </el-dialog>
</template>
<script setup lang="ts">
import { ref, watch } from 'vue'

const emit = defineEmits(['refresh'])

const paramFormRef = ref()

const form = ref<any>({
  mcp_servers: '',
  mcp_enable: false
})

const dialogVisible = ref<boolean>(false)
const loading = ref(false)

// Reset the form whenever the dialog closes so stale values never leak
// into the next open().
watch(dialogVisible, (bool) => {
  if (!bool) {
    form.value = {
      mcp_servers: '',
      mcp_enable: false
    }
  }
})

// Open the dialog pre-filled with the node's current MCP settings.
const open = (data: any) => {
  form.value = { ...form.value, ...data }
  dialogVisible.value = true
}

// Validate, hand the settings back to the parent, and close.
const submit = () => {
  paramFormRef.value.validate().then((valid: any) => {
    if (valid) {
      emit('refresh', form.value)
      dialogVisible.value = false
    }
  })
}

defineExpose({ open })
</script>
<style lang="scss" scoped>
.param-dialog {
  padding: 8px 8px 24px 8px;

  .el-dialog__header {
    padding: 16px 16px 0 16px;
  }

  .el-dialog__body {
    padding: 0 !important;
  }

  .dialog-max-height {
    height: 560px;
  }

  .custom-slider {
    .el-input-number.is-without-controls .el-input__wrapper {
      padding: 0 !important;
    }
  }
}
</style>

ui/src/workflow/nodes/ai-chat-node/index.vue

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -116,6 +116,22 @@
116116
/>
117117
</el-form-item>
118118

119+
<el-form-item>
120+
<template #label>
121+
<div class="flex-between">
122+
<div>{{ $t('views.applicationWorkflow.nodes.mcpNode.tool') }}</div>
123+
<el-button
124+
type="primary"
125+
link
126+
@click="openMcpServersDialog"
127+
@refreshForm="refreshParam"
128+
>
129+
<el-icon><Setting /></el-icon>
130+
</el-button>
131+
</div>
132+
</template>
133+
</el-form-item>
134+
119135
<el-form-item @click.prevent>
120136
<template #label>
121137
<div class="flex align-center">
@@ -163,6 +179,7 @@
163179
ref="ReasoningParamSettingDialogRef"
164180
@refresh="submitReasoningDialog"
165181
/>
182+
<McpServersDialog ref="mcpServersDialogRef" @refresh="submitMcpServersDialog" />
166183
</NodeContainer>
167184
</template>
168185
<script setup lang="ts">
@@ -177,6 +194,7 @@ import { isLastNode } from '@/workflow/common/data'
177194
import AIModeParamSettingDialog from '@/views/application/component/AIModeParamSettingDialog.vue'
178195
import { t } from '@/locales'
179196
import ReasoningParamSettingDialog from '@/views/application/component/ReasoningParamSettingDialog.vue'
197+
import McpServersDialog from '@/views/application/component/McpServersDialog.vue'
180198
const { model } = useStore()
181199
182200
const wheel = (e: any) => {
@@ -300,6 +318,20 @@ function submitReasoningDialog(val: any) {
300318
set(props.nodeModel.properties.node_data, 'model_setting', model_setting)
301319
}
302320
321+
const mcpServersDialogRef = ref()
322+
function openMcpServersDialog() {
323+
const config = {
324+
mcp_servers: chat_data.value.mcp_servers,
325+
mcp_enable: chat_data.value.mcp_enable
326+
}
327+
mcpServersDialogRef.value.open(config)
328+
}
329+
330+
function submitMcpServersDialog(config: any) {
331+
set(props.nodeModel.properties.node_data, 'mcp_servers', config.mcp_servers)
332+
set(props.nodeModel.properties.node_data, 'mcp_enable', config.mcp_enable)
333+
}
334+
303335
onMounted(() => {
304336
getModel()
305337
if (typeof props.nodeModel.properties.node_data?.is_result === 'undefined') {

0 commit comments

Comments
 (0)