0.1.0 所有 mvp 完成

This commit is contained in:
锦恢 2025-05-21 20:29:50 +08:00
parent 4017fc3290
commit f37b8babcd
37 changed files with 1684 additions and 161 deletions

View File

@ -5,6 +5,8 @@
- 新特性:更新协议内容,支持 streamable http 协议,未来将逐步取代 SSE 的连接方式
- 对于 uv 创建的 py 项目进行特殊支持:自动初始化项目,并将 mcp 定向到 .venv/bin/mcp 中,不再需要用户全局安装 mcp
- 对于 npm 创建的 js/ts 项目进行特殊支持:自动初始化项目
- 去除了 websearch 的设置,增加了 parallel_tool_calls 的设置。parallel_tool_calls 默认为 true,代表允许模型在单轮回复中调用多个工具
- 重构了 openmcp 连接模块的基础设施,基于新的技术设施实现了更加详细的连接模块的日志系统
## [main] 0.0.9
- 修复 0.0.8 引入的 bug:system prompt 返回的是索引而非真实内容

View File

@ -54,7 +54,7 @@
| `ext` | 支持基本的 MCP 项目管理 | `迭代版本` | 100% | `P0` |
| `service` | 支持自定义支持 openai 接口协议的大模型接入 | `完整版本` | 100% | `Done` |
| `service` | 支持自定义接口协议的大模型接入 | `MVP` | 0% | `P1` |
| `all` | 支持同时调试多个 MCP Server | `MVP` | 80% | `P0` |
| `all` | 支持同时调试多个 MCP Server | `MVP` | 100% | `P0` |
| `all` | 支持通过大模型进行在线验证 | `迭代版本` | 100% | `Done` |
| `all` | 支持对用户对应服务器的调试工作内容进行保存 | `迭代版本` | 100% | `Done` |
| `render` | 高危操作权限确认 | `MVP` | 0% | `P1` |

View File

@ -1,8 +1,8 @@
@font-face {
font-family: "iconfont"; /* Project id 4870215 */
src: url('iconfont.woff2?t=1746703816245') format('woff2'),
url('iconfont.woff?t=1746703816245') format('woff'),
url('iconfont.ttf?t=1746703816245') format('truetype');
src: url('iconfont.woff2?t=1747820198035') format('woff2'),
url('iconfont.woff?t=1747820198035') format('woff'),
url('iconfont.ttf?t=1747820198035') format('truetype');
}
.iconfont {
@ -13,6 +13,10 @@
-moz-osx-font-smoothing: grayscale;
}
.icon-parallel:before {
content: "\e61d";
}
.icon-waiting:before {
content: "\e6d0";
}

Binary file not shown.

View File

@ -47,11 +47,11 @@ export interface TextMessage {
export type ChatMessage = ToolMessage | TextMessage;
// 新增状态和工具数据
interface EnableToolItem {
export interface EnableToolItem {
name: string;
description: string;
enabled: boolean;
inputSchema?: any;
inputSchema: any;
}
export interface ChatSetting {
@ -61,6 +61,7 @@ export interface ChatSetting {
temperature: number
enableWebSearch: boolean
contextLength: number
parallelToolCalls: boolean
}
export interface ChatStorage {
@ -95,8 +96,6 @@ interface TextItem {
export type RichTextItem = PromptTextItem | ResourceTextItem | TextItem;
export const allTools = ref<ToolItem[]>([]);
export interface ICommonRenderMessage {
role: 'user' | 'assistant/content';
content: string;
@ -121,8 +120,6 @@ export function getToolSchema(enableTools: EnableToolItem[]) {
const enableTool = enableTools[i];
if (enableTool.enabled) {
if (enableTool.inputSchema) {
toolsSchema.push({
type: 'function',
function: {
@ -131,18 +128,6 @@ export function getToolSchema(enableTools: EnableToolItem[]) {
parameters: enableTool.inputSchema
}
});
} else {
const tool = allTools.value[i];
toolsSchema.push({
type: 'function',
function: {
name: tool.name,
description: tool.description || "",
parameters: tool.inputSchema
}
});
}
}
}
return toolsSchema;

View File

@ -0,0 +1,25 @@
<template>
    <!-- Toggle button for the "parallel tool calls" chat setting.
         Highlighted (active) while parallelToolCalls is enabled. -->
    <el-tooltip :content="t('parallel-tool-calls')" placement="top">
        <div class="setting-button" :class="{ 'active': tabStorage.settings.parallelToolCalls }" size="small"
            @click="toggle">
            <span class="iconfont icon-parallel"></span>
        </div>
    </el-tooltip>
</template>
<script setup lang="ts">
import { inject } from 'vue';
import { useI18n } from 'vue-i18n';
import type { ChatStorage } from '../chat';

const { t } = useI18n();

// Per-tab chat storage is provided by the parent chat panel.
// NOTE(review): assumes an ancestor component always provides 'tabStorage' — verify against the chat panel setup.
const tabStorage = inject('tabStorage') as ChatStorage;

// Flip the parallelToolCalls flag; reactivity propagates the change to the request builder.
const toggle = () => {
    tabStorage.settings.parallelToolCalls = !tabStorage.settings.parallelToolCalls;
};
</script>
<style></style>

View File

@ -5,7 +5,7 @@
<ToolUse />
<Prompt />
<Resource />
<Websearch />
<ParallelToolCalls />
<Temperature />
<ContextLength />
</div>
@ -22,7 +22,7 @@ import SystemPrompt from './system-prompt.vue';
import ToolUse from './tool-use.vue';
import Prompt from './prompt.vue';
import Resource from './resource.vue';
import Websearch from './websearch.vue';
import ParallelToolCalls from './parallel-tool-calls.vue';
import Temperature from './temperature.vue';
import ContextLength from './context-length.vue';
@ -57,9 +57,10 @@ if (!tabStorage.settings) {
modelIndex: llmManager.currentModelIndex,
enableTools: [],
enableWebSearch: false,
temperature: 0.7,
temperature: 0.6,
contextLength: 20,
systemPrompt: ''
systemPrompt: '',
parallelToolCalls: true
} as ChatSetting;
}

View File

@ -37,9 +37,9 @@
<script setup lang="ts">
import { ref, computed, inject, onMounted } from 'vue';
import { useI18n } from 'vue-i18n';
import { allTools, type ChatStorage, getToolSchema } from '../chat';
import { type ChatStorage, type EnableToolItem, getToolSchema } from '../chat';
import { markdownToHtml } from '@/components/main-panel/chat/markdown/markdown';
import { useMessageBridge } from '@/api/message-bridge';
import { mcpClientAdapter } from '@/views/connect/core';
const { t } = useI18n();
@ -51,8 +51,6 @@ const availableToolsNum = computed(() => {
return tabStorage.settings.enableTools.filter(tool => tool.enabled).length;
});
// toggleTools
const toggleTools = () => {
showToolsDialog.value = true;
@ -82,19 +80,31 @@ const disableAllTools = () => {
};
onMounted(async () => {
const bridge = useMessageBridge();
const res = await bridge.commandRequest('tools/list');
if (res.code === 200) {
allTools.value = res.msg.tools || [];
tabStorage.settings.enableTools = [];
for (const tool of allTools.value) {
tabStorage.settings.enableTools.push({
// tool tabStorage.settings.enableTools
// enable
const disableToolNames = new Set<string>(
tabStorage.settings.enableTools
.filter(tool => !tool.enabled)
.map(tool => tool.name)
);
const newTools: EnableToolItem[] = [];
for (const client of mcpClientAdapter.clients) {
const tools = await client.getTools();
for (const tool of tools.values()) {
const enabled = !disableToolNames.has(tool.name);
newTools.push({
name: tool.name,
description: tool.description,
enabled: true
inputSchema: tool.inputSchema,
enabled
});
}
}
tabStorage.settings.enableTools = newTools;
});
</script>

View File

@ -1,6 +1,6 @@
import type { ToolCallContent, ToolCallResponse } from "@/hook/type";
import { callTool } from "../../tool/tools";
import { MessageState, type ToolCall } from "../chat-box/chat";
import { mcpClientAdapter } from "@/views/connect/core";
export interface ToolCallResult {
state: MessageState;
@ -25,7 +25,7 @@ export async function handleToolCalls(toolCall: ToolCall): Promise<ToolCallResul
const toolArgs = argsResult.value;
// 进行调用,根据结果返回不同的值
const toolResponse = await callTool(toolName, toolArgs);
const toolResponse = await mcpClientAdapter.callTool(toolName, toolArgs);
return handleToolResponse(toolResponse);
}

View File

@ -157,8 +157,6 @@ export class TaskLoop {
}, { once: true });
console.log(chatData);
this.bridge.postMessage({
command: 'llm/chat/completions',
data: JSON.parse(JSON.stringify(chatData)),
@ -182,6 +180,7 @@ export class TaskLoop {
const model = this.getLlmConfig().userModel;
const temperature = tabStorage.settings.temperature;
const tools = getToolSchema(tabStorage.settings.enableTools);
const parallelToolCalls = tabStorage.settings.parallelToolCalls;
const userMessages = [];
@ -210,6 +209,7 @@ export class TaskLoop {
model,
temperature,
tools,
parallelToolCalls,
messages: userMessages,
} as ChatCompletionCreateParamsBase;

View File

@ -84,10 +84,6 @@ function handleClick(prompt: PromptTemplate) {
}
onMounted(async () => {
for (const client of mcpClientAdapter.clients) {
await client.getPromptTemplates();
}
if (tabStorage.currentPromptName === undefined) {
const masterNode = mcpClientAdapter.masterNode;
const prompt = masterNode.promptTemplates?.values().next();

View File

@ -89,10 +89,6 @@ function handleClick(template: ResourceTemplate) {
}
onMounted(async () => {
for (const client of mcpClientAdapter.clients) {
await client.getResourceTemplates({ cache: false });
}
if (tabStorage.currentResourceName === undefined && tabStorage.currentType === 'template') {
const masterNode = mcpClientAdapter.masterNode;
const resourceTemplate = masterNode?.resourceTemplates?.values().next();

View File

@ -91,10 +91,6 @@ async function handleClick(resource: Resources) {
}
onMounted(async () => {
for (const client of mcpClientAdapter.clients) {
await client.getResources();
}
if (tabStorage.currentResourceName === undefined && tabStorage.currentType === 'resource') {
const masterNode = mcpClientAdapter.masterNode;
const resource = masterNode.resources?.values().next();

View File

@ -65,10 +65,6 @@ function handleClick(tool: { name: string }) {
}
onMounted(async () => {
for (const client of mcpClientAdapter.clients) {
await client.getTools();
}
if (tabStorage.currentToolName === undefined) {
const masterNode = mcpClientAdapter.masterNode;
const tool = masterNode.tools?.values().next();

View File

@ -1,7 +1,4 @@
import { useMessageBridge } from '@/api/message-bridge';
import { mcpSetting } from '@/hook/mcp';
import type { ToolsListResponse, ToolCallResponse, CasualRestAPI } from '@/hook/type';
import { mcpClientAdapter } from '@/views/connect/core';
import type { ToolCallResponse } from '@/hook/type';
export interface ToolStorage {
activeNames: any[];
@ -9,25 +6,3 @@ export interface ToolStorage {
lastToolCallResponse?: ToolCallResponse | string;
formData: Record<string, any>;
}
/**
* @description
* @param toolName
* @param toolArgs
* @returns
*/
export async function callTool(toolName: string, toolArgs: Record<string, any>) {
mcpClientAdapter
const bridge = useMessageBridge();
const { msg } = await bridge.commandRequest<ToolCallResponse>('tools/call', {
toolName,
toolArgs: JSON.parse(JSON.stringify(toolArgs)),
callToolOption: {
timeout: mcpSetting.timeout * 1000
}
});
return msg;
}

View File

@ -155,5 +155,6 @@
"return": "عودة",
"error": "خطأ",
"feedback": "تعليقات",
"waiting-mcp-server": "في انتظار استجابة خادم MCP"
"waiting-mcp-server": "في انتظار استجابة خادم MCP",
"parallel-tool-calls": "السماح للنموذج باستدعاء أدوات متعددة في رد واحد"
}

View File

@ -155,5 +155,6 @@
"return": "Zurück",
"error": "Fehler",
"feedback": "Feedback",
"waiting-mcp-server": "Warten auf Antwort vom MCP-Server"
"waiting-mcp-server": "Warten auf Antwort vom MCP-Server",
"parallel-tool-calls": "Erlauben Sie dem Modell, mehrere Tools in einer einzigen Antwort aufzurufen"
}

View File

@ -155,5 +155,6 @@
"return": "Back",
"error": "Error",
"feedback": "Feedback",
"waiting-mcp-server": "Waiting for MCP server response"
"waiting-mcp-server": "Waiting for MCP server response",
"parallel-tool-calls": "Allow the model to call multiple tools in a single reply"
}

View File

@ -155,5 +155,6 @@
"return": "Retour",
"error": "Erreur",
"feedback": "Retour",
"waiting-mcp-server": "En attente de la réponse du serveur MCP"
"waiting-mcp-server": "En attente de la réponse du serveur MCP",
"parallel-tool-calls": "Permettre au modèle d'appeler plusieurs outils en une seule réponse"
}

View File

@ -155,5 +155,6 @@
"return": "戻る",
"error": "エラー",
"feedback": "フィードバック",
"waiting-mcp-server": "MCPサーバーの応答を待機中"
"waiting-mcp-server": "MCPサーバーの応答を待機中",
"parallel-tool-calls": "モデルが単一の返信で複数のツールを呼び出すことを許可する"
}

View File

@ -155,5 +155,6 @@
"return": "돌아가기",
"error": "오류",
"feedback": "피드백",
"waiting-mcp-server": "MCP 서버 응답 대기 중"
"waiting-mcp-server": "MCP 서버 응답 대기 중",
"parallel-tool-calls": "모델이 단일 응답에서 여러 도구를 호출할 수 있도록 허용"
}

View File

@ -155,5 +155,6 @@
"return": "Назад",
"error": "Ошибка",
"feedback": "Обратная связь",
"waiting-mcp-server": "Ожидание ответа от сервера MCP"
"waiting-mcp-server": "Ожидание ответа от сервера MCP",
"parallel-tool-calls": "Разрешить модели вызывать несколько инструментов в одном ответе"
}

View File

@ -155,5 +155,6 @@
"return": "返回",
"error": "错误",
"feedback": "反馈",
"waiting-mcp-server": "等待 MCP 服务器响应"
"waiting-mcp-server": "等待 MCP 服务器响应",
"parallel-tool-calls": "允许模型在单轮回复中调用多个工具"
}

View File

@ -155,5 +155,6 @@
"return": "返回",
"error": "錯誤",
"feedback": "反饋",
"waiting-mcp-server": "等待MCP伺服器響應"
"waiting-mcp-server": "等待MCP伺服器響應",
"parallel-tool-calls": "允許模型在單輪回覆中調用多個工具"
}

View File

@ -11,13 +11,13 @@
<template #title>
<div class="tool-calls">
<div class="tool-call-header">
<span>{{ log.message.split('\n')[0] }}</span>
<span>{{ log.title }}</span>
</div>
</div>
</template>
<div class="logger-inner">
{{ log.message }}
{{ log.message || '' }}
</div>
</el-collapse-item>
</el-collapse>
@ -52,7 +52,7 @@ function clearLogs() {
<style>
.connection-option {
height: 98%;
height: 90vh;
}
.connection-option .el-scrollbar__view {

View File

@ -1,7 +1,7 @@
<template>
<el-scrollbar>
<div class="connection-container">
<div class="connect-panel-container"
<div class="connect-panel-container left"
:ref="el => client.connectionSettingRef = el"
>
<ConnectionMethod :index="props.index" />
@ -17,7 +17,7 @@
</div>
</div>
<div class="connect-panel-container"
<div class="connect-panel-container right"
:ref="el => client.connectionLogRef = el"
>
<ConnectionLog :index="props.index" />
@ -75,15 +75,26 @@ async function connect() {
}
.connect-panel-container {
.connect-panel-container.left {
display: flex;
flex-direction: column;
width: 45%;
max-height: 85vh;
min-width: 300px;
max-width: 500px;
min-width: 350px;
padding: 5px 20px;
}
.connect-panel-container.right {
display: flex;
flex-direction: column;
width: 55%;
max-height: 85vh;
min-width: 450px;
padding: 5px 20px;
}
.connection-option {
display: flex;
flex-direction: column;

View File

@ -22,6 +22,14 @@ export const connectionSelectDataViewOption: ConnectionTypeOptionItem[] = [
}
]
function prettifyMapKeys(keys: MapIterator<string>) {
    // Render each map key as a "+ key" bullet line, one per key,
    // joined with newlines for display in the connection log panel.
    const lines: string[] = [];
    for (const key of keys) {
        lines.push(`+ ${key}`);
    }
    return lines.join('\n');
}
export class McpClient {
// 连接入参
@ -60,6 +68,7 @@ export class McpClient {
// 连接出参
this.connectionResult = {
success: false,
reuseConntion: false,
status: 'disconnected',
clientId: '',
name: '',
@ -263,6 +272,7 @@ export class McpClient {
const message = msg.toString();
this.connectionResult.logString.push({
type: 'error',
title: '连接失败',
message
});
@ -271,14 +281,46 @@ export class McpClient {
} else {
this.connectionResult.logString.push({
type: 'info',
message: msg.name + ' ' + msg.version + ' 连接成功'
title: msg.name + ' ' + msg.version + ' 连接成功',
message: JSON.stringify(msg, null, 2)
});
}
this.connectionResult.reuseConntion = msg.reuseConntion;
this.connectionResult.status = msg.status;
this.connectionResult.clientId = msg.clientId;
this.connectionResult.name = msg.name;
this.connectionResult.version = msg.version;
// 刷新所有资源
const tools = await this.getTools({ cache: false });
this.connectionResult.logString.push({
type: 'info',
title: `${this.name}'s tools loaded (${tools.size})`,
message: prettifyMapKeys(tools.keys())
});
const prompts = await this.getPromptTemplates({ cache: false });
this.connectionResult.logString.push({
type: 'info',
title: `${this.name}'s prompts loaded (${prompts.size})`,
message: prettifyMapKeys(prompts.keys())
});
const resources = await this.getResources({ cache: false });
this.connectionResult.logString.push({
type: 'info',
title: `${this.name}'s resources loaded (${resources.size})`,
message: prettifyMapKeys(resources.keys())
});
const resourceTemplates = await this.getResourceTemplates({ cache: false });
this.connectionResult.logString.push({
type: 'info',
title: `${this.name}'s resourceTemplates loaded (${resourceTemplates.size})`,
message: prettifyMapKeys(resourceTemplates.keys())
});
return true;
}
@ -330,14 +372,15 @@ export class McpClient {
if (code === 200) {
this.connectionResult.logString.push({
type: 'info',
message: '预设环境变量同步完成'
title: '预设环境变量同步完成'
});
return msg;
} else {
this.connectionResult.logString.push({
type: 'error',
message: '预设环境变量同步失败: ' + msg
title: '预设环境变量同步失败',
message: msg.toString()
});
}
}
@ -403,9 +446,7 @@ class McpClientAdapter {
this.connectLogListenerCancel = bridge.addCommandListener('connect/log', (message) => {
const { code, msg } = message;
console.log(code, msg);
const client = this.clients.at(-1);
console.log(client);
if (!client) {
return;
@ -413,7 +454,8 @@ class McpClientAdapter {
client.connectionResult.logString.push({
type: code === 200 ? 'info': 'error',
message: msg
title: msg.title,
message: msg.message
});
}, { once: false });

View File

@ -14,8 +14,8 @@
<span v-if="scope.item.client.connectionResult.success"
class="success"
>
<span class="name">{{ scope.item.client.connectionResult.name }}</span>
<span class="iconfont icon-dui"></span>
<span class="name">{{ scope.item.client.connectionResult.name }}</span>
</span>
<span v-else>
<span class="server-name" style="margin-right: 60px;">
@ -124,7 +124,7 @@ function deleteServer(index: number) {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
margin-right: 5px;
margin-left: 5px;
}
.server-item .success {

View File

@ -8,15 +8,17 @@ export interface ConnectionTypeOptionItem {
export interface IConnectionResult {
info?: string;
success: boolean;
info?: string
success: boolean
reuseConntion: boolean
status: string
clientId: string
name: string
version: string
logString: {
type: 'info' | 'error' | 'warning',
message: string
title: string
message?: string
}[]
}

View File

@ -31,12 +31,13 @@ export async function makeSimpleTalk() {
const chatStorage: ChatStorage = {
messages: [],
settings: {
temperature: 0.7,
temperature: 0.6,
modelIndex: llmManager.currentModelIndex,
systemPrompt: '',
enableTools: [],
enableWebSearch: false,
contextLength: 5
contextLength: 5,
parallelToolCalls: true
}
};

View File

@ -23,6 +23,30 @@ export const llms = [
userToken: '',
userModel: 'gpt-4-turbo'
},
{
id: 'qwen',
name: '通义千问 Qwen',
baseUrl: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
models: ['qwen-max', 'qwen-plus', 'qwen-turbo', 'qwen-long', 'qwen-omni-turbo', 'qwen-omni-turbo-realtime'],
provider: 'Alibaba',
isOpenAICompatible: true,
description: '阿里巴巴通义千问',
website: 'https://help.aliyun.com/zh/model-studio/models#cfc131abafghw',
userToken: '',
userModel: 'qwen-plus'
},
{
id: 'doubao',
name: '豆包 Seed',
baseUrl: 'https://ark.cn-beijing.volces.com/api/v3',
models: ['doubao-1.5-pro-32k', 'doubao-1.5-pro-256k', 'doubao-1.5-lite', 'deepseek-v3'],
provider: 'bytedance',
isOpenAICompatible: true,
description: '字节跳动豆包 Seed',
website: 'https://help.aliyun.com/zh/model-studio/models#cfc131abafghw',
userToken: '',
userModel: 'doubao-1.5-pro-32k'
},
{
id: 'mistral',
name: 'Mistral',

View File

@ -8,17 +8,6 @@ export class LlmController {
@Controller('llm/chat/completions')
async chatCompletion(data: RequestData, webview: PostMessageble) {
let { tools = [] } = data;
const client = getClient(data.clientId);
if (tools.length > 0 && !client) {
return {
code: 501,
msg:'mcp client 尚未连接'
};
}
try {
await streamingChatCompletion(data, webview);

View File

@ -12,7 +12,15 @@ export async function streamingChatCompletion(
data: any,
webview: PostMessageble
) {
let { baseURL, apiKey, model, messages, temperature, tools = [] } = data;
let {
baseURL,
apiKey,
model,
messages,
temperature,
tools = [],
parallelToolCalls = true
} = data;
const client = new OpenAI({
baseURL,
@ -30,8 +38,7 @@ export async function streamingChatCompletion(
messages,
temperature,
tools,
tool_choice: 'auto',
web_search_options: {},
parallel_tool_calls: parallelToolCalls,
stream: true
});

View File

@ -13,9 +13,15 @@ export class ConnectController {
@Controller('lookup-env-var')
async lookupEnvVar(data: RequestData, webview: PostMessageble) {
const client = getClient(data.clientId);
const { keys } = data;
const values = keys.map((key: string) => process.env[key] || '');
const values = keys.map((key: string) => {
// TODO: 在 Windows 上测试
if (process.platform === 'win32' && key.toLowerCase() === 'path') {
key = 'Path'; // 确保正确匹配环境变量的 key
}
return process.env[key] || '';
});
return {
code: 200,

View File

@ -56,10 +56,20 @@ function getCommandFileExt(option: McpOptions) {
function collectAllOutputExec(command: string, cwd: string) {
return new Promise<string>((resolve, reject) => {
const handler = setTimeout(() => {
resolve('');
}, 5000);
exec(command, { cwd }, (error, stdout, stderr) => {
const errorString = error || '';
const stdoutString = stdout || '';
const stderrString = stderr || '';
console.log('[collectAllOutputExec]', errorString);
console.log('[collectAllOutputExec]', stdoutString);
console.log('[collectAllOutputExec]', stderrString);
clearTimeout(handler);
resolve(errorString + stdoutString + stderrString);
});
});
@ -140,7 +150,10 @@ async function initUv(option: McpOptions, cwd: string, webview?: PostMessageble)
command: 'connect/log',
data: {
code: syncOutput.toLowerCase().startsWith('error') ? 501: 200,
msg: syncOutput
msg: {
title: 'uv sync',
message: syncOutput
}
}
});
@ -149,7 +162,10 @@ async function initUv(option: McpOptions, cwd: string, webview?: PostMessageble)
command: 'connect/log',
data: {
code: addOutput.toLowerCase().startsWith('error') ? 501: 200,
msg: addOutput
msg: {
title: 'uv add mcp "mcp[cli]"',
message: addOutput
}
}
});
}
@ -175,7 +191,10 @@ async function initNpm(option: McpOptions, cwd: string, webview?: PostMessageble
command: 'connect/log',
data: {
code: installOutput.toLowerCase().startsWith('error')? 200: 501,
msg: installOutput
msg: {
title: 'npm i',
message: installOutput
}
}
})
}
@ -212,6 +231,7 @@ export async function connectService(
// 通过 option 字符串进行 hash得到唯一的 uuid
const uuid = await deterministicUUID(JSON.stringify(option));
const reuseConntion = clientMap.has(uuid);
if (!clientMap.has(uuid)) {
const client = await connect(option);
clientMap.set(uuid, client);
@ -226,6 +246,7 @@ export async function connectService(
msg: {
status: 'success',
clientId: uuid,
reuseConntion,
name: versionInfo?.name,
version: versionInfo?.version
}
@ -234,7 +255,7 @@ export async function connectService(
return connectResult;
} catch (error) {
console.log(error);
console.log('[connectService catch error]', error);
// TODO: 这边获取到的 error 不够精致,如何才能获取到更加精准的错误
// 比如 error: Failed to spawn: `server.py`
@ -243,7 +264,10 @@ export async function connectService(
let errorMsg = '';
if (option.command) {
errorMsg += tryGetRunCommandError(option.command, option.args, option.cwd);
errorMsg += await collectAllOutputExec(
option.command + ' ' + (option.args || []).join(' '),
option.cwd || process.cwd()
)
}
errorMsg += (error as any).toString();

File diff suppressed because one or more lines are too long