Merge branch 'main' into feat/auth

Authored by Li Yaning, 2025-06-03 21:51:26 +08:00, committed by GitHub
commit 740293ab74
59 changed files with 1298 additions and 300 deletions

View File

@ -1,5 +1,30 @@
# Change Log
## [main] 0.1.5
- Fix the "models" prefix issue when fetching the model list from Gemini.
- Add web API support.
## [main] 0.1.4
- Re-implement the low-level networking of the OpenAI protocol to support the full range of Google Gemini models.
- Implement an index adapter to support the full range of Grok 3 models.
- Fix issue#23: the plugin throws "Cannot read properties of undefined (reading 'name')" when creating a connection.
- With an API key and base URL filled in, the model list can now be refreshed with one click instead of being typed in manually.
## [main] 0.1.3
- Fix issue#21: the input box is not cleared after text is sent via the button.
- Fix the stop button disappearing after multi-turn conversations.
- Fix issue#25: unable to connect over streamable HTTP.
## [main] 0.1.2
- New feature: add a "resend" button to user-sent messages.
- Support issue#17 (improving the flow of adding an MCP server in the left panel): add forced focus so the input box does not lose focus while the user is creating an MCP server connection.
- Update the MCP & OpenAI protocol content.
- Fix issue#21: VS Code plugin UI bug where the connect button could not be fully revealed by scrolling when the panel height was limited.
- Fix issue#21: closing the last tab now restores the default page.
- Fix issue#22: tool module UI glitch; openmcp now parses Python MCP servers that use pydantic for typing.
- Improve the object input box: it now has syntax highlighting and limited autocompletion.
- Add extra support for all of Trae's default themes.
## [main] 0.1.1
- Fix several bugs when connecting to Ubuntu over SSH.
- Fix wrong initialization parameters when connecting a Python project by clicking openmcp.

Binary file not shown. (image changed: 54 KiB → 62 KiB)

package-lock.json (generated, 384 lines changed)
View File

@ -1,23 +1,23 @@
{
"name": "openmcp",
"version": "0.1.1",
"version": "0.1.3",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "openmcp",
"version": "0.1.1",
"version": "0.1.3",
"workspaces": [
"service",
"renderer",
"software"
],
"dependencies": {
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"@seald-io/nedb": "^4.1.1",
"axios": "^1.7.7",
"axios": "^1.9.0",
"bson": "^6.8.0",
"openai": "^4.93.0",
"openai": "^5.0.1",
"pako": "^2.1.0",
"tesseract.js": "^6.0.1",
"uuid": "^11.1.0",
@ -510,6 +510,133 @@
"node": ">=6.9.0"
}
},
"node_modules/@codemirror/autocomplete": {
"version": "6.18.6",
"resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.6.tgz",
"integrity": "sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==",
"license": "MIT",
"dependencies": {
"@codemirror/language": "^6.0.0",
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.17.0",
"@lezer/common": "^1.0.0"
}
},
"node_modules/@codemirror/commands": {
"version": "6.8.1",
"resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.8.1.tgz",
"integrity": "sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==",
"license": "MIT",
"dependencies": {
"@codemirror/language": "^6.0.0",
"@codemirror/state": "^6.4.0",
"@codemirror/view": "^6.27.0",
"@lezer/common": "^1.1.0"
}
},
"node_modules/@codemirror/lang-json": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.1.tgz",
"integrity": "sha512-+T1flHdgpqDDlJZ2Lkil/rLiRy684WMLc74xUnjJH48GQdfJo/pudlTRreZmKwzP8/tGdKf83wlbAdOCzlJOGQ==",
"license": "MIT",
"dependencies": {
"@codemirror/language": "^6.0.0",
"@lezer/json": "^1.0.0"
}
},
"node_modules/@codemirror/language": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.11.0.tgz",
"integrity": "sha512-A7+f++LodNNc1wGgoRDTt78cOwWm9KVezApgjOMp1W4hM0898nsqBXwF+sbePE7ZRcjN7Sa1Z5m2oN27XkmEjQ==",
"license": "MIT",
"dependencies": {
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.23.0",
"@lezer/common": "^1.1.0",
"@lezer/highlight": "^1.0.0",
"@lezer/lr": "^1.0.0",
"style-mod": "^4.0.0"
}
},
"node_modules/@codemirror/lint": {
"version": "6.8.5",
"resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.5.tgz",
"integrity": "sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==",
"license": "MIT",
"dependencies": {
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.35.0",
"crelt": "^1.0.5"
}
},
"node_modules/@codemirror/search": {
"version": "6.5.11",
"resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.11.tgz",
"integrity": "sha512-KmWepDE6jUdL6n8cAAqIpRmLPBZ5ZKnicE8oGU/s3QrAVID+0VhLFrzUucVKHG5035/BSykhExDL/Xm7dHthiA==",
"license": "MIT",
"dependencies": {
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.0.0",
"crelt": "^1.0.5"
}
},
"node_modules/@codemirror/state": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.5.2.tgz",
"integrity": "sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA==",
"license": "MIT",
"dependencies": {
"@marijn/find-cluster-break": "^1.0.0"
}
},
"node_modules/@codemirror/theme-one-dark": {
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/@codemirror/theme-one-dark/-/theme-one-dark-6.1.2.tgz",
"integrity": "sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA==",
"license": "MIT",
"dependencies": {
"@codemirror/language": "^6.0.0",
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.0.0",
"@lezer/highlight": "^1.0.0"
}
},
"node_modules/@codemirror/view": {
"version": "6.37.1",
"resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.37.1.tgz",
"integrity": "sha512-Qy4CAUwngy/VQkEz0XzMKVRcckQuqLYWKqVpDDDghBe5FSXSqfVrJn49nw3ePZHxRUz4nRmb05Lgi+9csWo4eg==",
"license": "MIT",
"dependencies": {
"@codemirror/state": "^6.5.0",
"crelt": "^1.0.6",
"style-mod": "^4.1.0",
"w3c-keyname": "^2.2.4"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/trace-mapping": "0.3.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.0.3",
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@ctrl/tinycolor": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/@ctrl/tinycolor/-/tinycolor-3.6.1.tgz",
@ -1436,6 +1563,41 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@lezer/common": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz",
"integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==",
"license": "MIT"
},
"node_modules/@lezer/highlight": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz",
"integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==",
"license": "MIT",
"dependencies": {
"@lezer/common": "^1.0.0"
}
},
"node_modules/@lezer/json": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.3.tgz",
"integrity": "sha512-BP9KzdF9Y35PDpv04r0VeSTKDeox5vVr3efE7eBbx3r4s3oNLfunchejZhjArmeieBH+nVOpgIiBJpEAv8ilqQ==",
"license": "MIT",
"dependencies": {
"@lezer/common": "^1.2.0",
"@lezer/highlight": "^1.0.0",
"@lezer/lr": "^1.0.0"
}
},
"node_modules/@lezer/lr": {
"version": "1.4.2",
"resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz",
"integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==",
"license": "MIT",
"dependencies": {
"@lezer/common": "^1.0.0"
}
},
"node_modules/@malept/cross-spawn-promise": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@malept/cross-spawn-promise/-/cross-spawn-promise-1.1.1.tgz",
@ -1491,13 +1653,19 @@
"node": ">=10"
}
},
"node_modules/@marijn/find-cluster-break": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz",
"integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==",
"license": "MIT"
},
"node_modules/@modelcontextprotocol/sdk": {
"version": "1.11.4",
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.11.4.tgz",
"integrity": "sha512-OTbhe5slIjiOtLxXhKalkKGhIQrwvhgCDs/C2r8kcBTy5HR/g43aDQU0l7r8O0VGbJPTNJvDc7ZdQMdQDJXmbw==",
"version": "1.12.1",
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.12.1.tgz",
"integrity": "sha512-KG1CZhZfWg+u8pxeM/mByJDScJSrjjxLc8fwQqbsS8xCjBmQfMNEBTotYdNanKekepnfRI85GtgQlctLFpcYPw==",
"license": "MIT",
"dependencies": {
"ajv": "^8.17.1",
"ajv": "^6.12.6",
"content-type": "^1.0.5",
"cors": "^2.8.5",
"cross-spawn": "^7.0.5",
@ -1513,6 +1681,28 @@
"node": ">=18"
}
},
"node_modules/@modelcontextprotocol/sdk/node_modules/ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.4.1",
"uri-js": "^4.2.2"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/@modelcontextprotocol/sdk/node_modules/json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
"license": "MIT"
},
"node_modules/@openmcp/electron": {
"resolved": "software",
"link": true
@ -3159,6 +3349,7 @@
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
@ -4059,6 +4250,21 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/codemirror": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz",
"integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==",
"license": "MIT",
"dependencies": {
"@codemirror/autocomplete": "^6.0.0",
"@codemirror/commands": "^6.0.0",
"@codemirror/language": "^6.0.0",
"@codemirror/lint": "^6.0.0",
"@codemirror/search": "^6.0.0",
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.0.0"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
@ -4392,6 +4598,19 @@
"node": ">= 10"
}
},
"node_modules/create-require": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true,
"license": "MIT"
},
"node_modules/crelt": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz",
"integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==",
"license": "MIT"
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -5695,7 +5914,6 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
"dev": true,
"license": "MIT"
},
"node_modules/fast-redact": {
@ -5717,6 +5935,7 @@
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz",
"integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==",
"dev": true,
"funding": [
{
"type": "github",
@ -7200,6 +7419,7 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
"integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
"dev": true,
"license": "MIT"
},
"node_modules/json-stringify-safe": {
@ -8221,19 +8441,10 @@
}
},
"node_modules/openai": {
"version": "4.100.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.100.0.tgz",
"integrity": "sha512-9soq/wukv3utxcuD7TWFqKdKp0INWdeyhUCvxwrne5KwnxaCp4eHL4GdT/tMFhYolxgNhxFzg5GFwM331Z5CZg==",
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/openai/-/openai-5.0.1.tgz",
"integrity": "sha512-Do6vxhbDv7cXhji/4ct1lrpZYMAOmjYbhyA9LJTuG7OfpbWMpuS+EIXkRT7R+XxpRB1OZhU/op4FU3p3uxU6gw==",
"license": "Apache-2.0",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7"
},
"bin": {
"openai": "bin/cli"
},
@ -8250,21 +8461,6 @@
}
}
},
"node_modules/openai/node_modules/@types/node": {
"version": "18.19.101",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.101.tgz",
"integrity": "sha512-Ykg7fcE3+cOQlLUv2Ds3zil6DVjriGQaSN/kEpl5HQ3DIGM6W0F2n9+GkWV4bRt7KjLymgzNdTnSKCbFUUJ7Kw==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/openai/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"license": "MIT"
},
"node_modules/opencollective-postinstall": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/opencollective-postinstall/-/opencollective-postinstall-2.0.3.tgz",
@ -8797,7 +8993,6 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
@ -9030,6 +9225,7 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
"integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
@ -9823,6 +10019,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/style-mod": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz",
"integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==",
"license": "MIT"
},
"node_modules/sumchecker": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/sumchecker/-/sumchecker-3.0.1.tgz",
@ -10411,7 +10613,6 @@
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
"dev": true,
"license": "BSD-2-Clause",
"dependencies": {
"punycode": "^2.1.0"
@ -10563,6 +10764,12 @@
"typescript": ">=5.0.0"
}
},
"node_modules/w3c-keyname": {
"version": "2.2.8",
"resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
"integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
"license": "MIT"
},
"node_modules/wasm-feature-detect": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/wasm-feature-detect/-/wasm-feature-detect-1.8.0.tgz",
@ -11027,6 +11234,10 @@
"name": "@openmcp/renderer",
"version": "0.1.0",
"dependencies": {
"@codemirror/autocomplete": "^6.18.6",
"@codemirror/lang-json": "^6.0.1",
"@codemirror/theme-one-dark": "^6.1.2",
"codemirror": "^6.0.1",
"core-js": "^3.8.3",
"element-plus": "^2.9.9",
"katex": "^0.16.21",
@ -11107,6 +11318,51 @@
"node": ">=14.14"
}
},
"renderer/node_modules/openai": {
"version": "4.104.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.104.0.tgz",
"integrity": "sha512-p99EFNsA/yX6UhVO93f5kJsDRLAg+CTA2RBqdHK4RtK8u5IJw32Hyb2dTGKbnnFmnuoBv5r7Z2CURI9sGZpSuA==",
"license": "Apache-2.0",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7"
},
"bin": {
"openai": "bin/cli"
},
"peerDependencies": {
"ws": "^8.18.0",
"zod": "^3.23.8"
},
"peerDependenciesMeta": {
"ws": {
"optional": true
},
"zod": {
"optional": true
}
}
},
"renderer/node_modules/openai/node_modules/@types/node": {
"version": "18.19.110",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.110.tgz",
"integrity": "sha512-WW2o4gTmREtSnqKty9nhqF/vA0GKd0V/rbC0OyjSk9Bz6bzlsXKT+i7WDdS/a0z74rfT2PO4dArVCSnapNLA5Q==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"renderer/node_modules/openai/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"license": "MIT"
},
"renderer/node_modules/vite": {
"version": "6.3.5",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.5.tgz",
@ -11262,10 +11518,11 @@
"version": "0.0.1",
"license": "MIT",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"@seald-io/nedb": "^4.1.1",
"open": "^10.1.2",
"openai": "^4.96.0",
"axios": "^1.9.0",
"openai": "^5.0.1",
"pako": "^2.1.0",
"pino": "^9.6.0",
"pino-pretty": "^13.0.0",
@ -11445,6 +11702,51 @@
"electron-builder": "^24.13.3",
"typescript": "^5.6.3"
}
},
"software/node_modules/@types/node": {
"version": "18.19.110",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.110.tgz",
"integrity": "sha512-WW2o4gTmREtSnqKty9nhqF/vA0GKd0V/rbC0OyjSk9Bz6bzlsXKT+i7WDdS/a0z74rfT2PO4dArVCSnapNLA5Q==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"software/node_modules/openai": {
"version": "4.104.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.104.0.tgz",
"integrity": "sha512-p99EFNsA/yX6UhVO93f5kJsDRLAg+CTA2RBqdHK4RtK8u5IJw32Hyb2dTGKbnnFmnuoBv5r7Z2CURI9sGZpSuA==",
"license": "Apache-2.0",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7"
},
"bin": {
"openai": "bin/cli"
},
"peerDependencies": {
"ws": "^8.18.0",
"zod": "^3.23.8"
},
"peerDependenciesMeta": {
"ws": {
"optional": true
},
"zod": {
"optional": true
}
}
},
"software/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"license": "MIT"
}
}
}

View File

@ -2,7 +2,7 @@
"name": "openmcp",
"displayName": "OpenMCP",
"description": "An all in one MCP Client/TestTool",
"version": "0.1.1",
"version": "0.1.4",
"publisher": "kirigaya",
"author": {
"name": "kirigaya",
@ -233,11 +233,11 @@
"build:task-loop": "webpack --config webpack/webpack.task-loop.js"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"@seald-io/nedb": "^4.1.1",
"axios": "^1.7.7",
"axios": "^1.9.0",
"bson": "^6.8.0",
"openai": "^4.93.0",
"openai": "^5.0.1",
"pako": "^2.1.0",
"tesseract.js": "^6.0.1",
"uuid": "^11.1.0",

View File

@ -16,6 +16,10 @@
"type-check": "vue-tsc --build"
},
"dependencies": {
"@codemirror/autocomplete": "^6.18.6",
"@codemirror/lang-json": "^6.0.1",
"@codemirror/theme-one-dark": "^6.1.2",
"codemirror": "^6.0.1",
"core-js": "^3.8.3",
"element-plus": "^2.9.9",
"katex": "^0.16.21",

Binary file not shown. (image changed: 54 KiB → 62 KiB)

View File

@ -1,8 +1,8 @@
@font-face {
font-family: "iconfont"; /* Project id 4870215 */
src: url('iconfont.woff2?t=1747820198035') format('woff2'),
url('iconfont.woff?t=1747820198035') format('woff'),
url('iconfont.ttf?t=1747820198035') format('truetype');
src: url('iconfont.woff2?t=1748859145515') format('woff2'),
url('iconfont.woff?t=1748859145515') format('woff'),
url('iconfont.ttf?t=1748859145515') format('truetype');
}
.iconfont {
@ -13,6 +13,10 @@
-moz-osx-font-smoothing: grayscale;
}
.icon-proxy:before {
content: "\e723";
}
.icon-parallel:before {
content: "\e61d";
}

Binary file not shown.

View File

@ -222,3 +222,9 @@ a {
.el-dropdown-menu__item:hover {
background-color: var(--background) !important;
}
/* codemirror */
.ͼo,
.ͼo .cm-gutters {
background-color: transparent !important;
}

View File

@ -1,147 +1,199 @@
<template>
<div class="k-input-object">
<textarea ref="textareaRef" v-model="inputValue" class="k-input-object__textarea"
:class="{ 'is-invalid': isInvalid }" @input="handleInput" @blur="handleBlur"
@keydown="handleKeydown"
:placeholder="props.placeholder"
></textarea>
</div>
<div v-if="errorMessage" class="k-input-object__error">
{{ errorMessage }}
<div :ref="el => editorContainer = el" class="k-input-object__editor"></div>
<div v-if="errorMessage" class="k-input-object__error">
{{ errorMessage }}
</div>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, watch, nextTick } from 'vue';
import { debounce } from 'lodash';
<script lang="ts" setup>
import { ref, onMounted, watch, type PropType } from 'vue'
import { EditorView, basicSetup } from 'codemirror'
import type { Completion, CompletionContext } from "@codemirror/autocomplete"
import { jsonLanguage } from "@codemirror/lang-json"
export default defineComponent({
name: 'KInputObject',
props: {
modelValue: {
type: Object,
default: () => ({})
},
placeholder: {
type: String,
default: '请输入 JSON 对象'
},
debounceTime: {
type: Number,
default: 500
}
import { json } from '@codemirror/lang-json'
import { oneDark } from '@codemirror/theme-one-dark'
import { debounce } from 'lodash'
const props = defineProps({
modelValue: {
type: Object,
default: () => ({})
},
emits: ['update:modelValue', 'parse-error'],
setup(props, { emit }) {
const textareaRef = ref<HTMLTextAreaElement | null>(null)
const inputValue = ref<string>(JSON.stringify(props.modelValue, null, 2))
const isInvalid = ref<boolean>(false)
const errorMessage = ref<string>('')
// Debounced JSON parsing with error reporting
const debouncedParse = debounce((value: string) => {
if (value.trim() === '') {
errorMessage.value = '';
isInvalid.value = false;
emit('update:modelValue', undefined);
return;
}
try {
const parsed = JSON.parse(value);
isInvalid.value = false;
errorMessage.value = '';
emit('update:modelValue', parsed);
} catch (error) {
isInvalid.value = true;
errorMessage.value = 'JSON 解析错误: ' + (error as Error).message;
emit('parse-error', error);
}
}, props.debounceTime)
const handleInput = () => {
debouncedParse(inputValue.value)
}
const handleBlur = () => {
// Flush the pending parse immediately on blur
debouncedParse.flush()
}
// Keep the textarea content in sync when modelValue changes externally
watch(
() => props.modelValue,
(newVal) => {
const currentParsed = tryParse(inputValue.value)
if (!isDeepEqual(currentParsed, newVal)) {
inputValue.value = JSON.stringify(newVal, null, 2)
}
},
{ deep: true }
)
// Safely try to parse JSON; return undefined on failure
const tryParse = (value: string): any => {
try {
return JSON.parse(value)
} catch {
return undefined
}
}
// Deep equality via JSON serialization
const isDeepEqual = (obj1: any, obj2: any): boolean => {
return JSON.stringify(obj1) === JSON.stringify(obj2)
}
const handleKeydown = (event: KeyboardEvent) => {
if (event.key === '{') {
event.preventDefault();
const start = textareaRef.value!.selectionStart;
const end = textareaRef.value!.selectionEnd;
const value = inputValue.value;
const newValue = value.substring(0, start) + '{\n \n}' + value.substring(end);
inputValue.value = newValue;
nextTick(() => {
textareaRef.value!.setSelectionRange(start + 2, start + 2);
});
} else if (event.key === '"') {
event.preventDefault();
const start = textareaRef.value!.selectionStart;
const end = textareaRef.value!.selectionEnd;
const value = inputValue.value;
const newValue = value.substring(0, start) + '""' + value.substring(end);
inputValue.value = newValue;
nextTick(() => {
textareaRef.value!.setSelectionRange(start + 1, start + 1);
});
} else if (event.key === 'Tab') {
event.preventDefault();
const start = textareaRef.value!.selectionStart;
const end = textareaRef.value!.selectionEnd;
const value = inputValue.value;
const newValue = value.substring(0, start) + ' ' + value.substring(end);
inputValue.value = newValue;
nextTick(() => {
textareaRef.value!.setSelectionRange(start + 1, start + 1);
});
} else if (event.key === 'Enter' && inputValue.value.trim() === '') {
event.preventDefault();
inputValue.value = '{}';
}
};
return {
textareaRef,
inputValue,
isInvalid,
errorMessage,
handleInput,
handleBlur,
handleKeydown,
props
}
placeholder: {
type: String,
default: '请输入 JSON 对象'
},
debounceTime: {
type: Number,
default: 500
},
schema: {
type: Object as PropType<{
type?: string;
properties?: Record<string, {
type: string;
description?: string;
default?: any;
enum?: any[];
}>;
required?: string[];
}>,
default: () => ({})
}
})
const emit = defineEmits(['update:modelValue', 'parse-error'])
const editorContainer = ref<any>(null);
const editorView = ref<EditorView | null>(null);
const isInvalid = ref(false);
const errorMessage = ref('');
const inputValue = ref<string>(JSON.stringify(props.modelValue, null, 2));
// Debounced JSON parsing with error reporting
const debouncedParse = debounce((value: string) => {
if (value.trim() === '') {
errorMessage.value = '';
isInvalid.value = false;
emit('update:modelValue', undefined);
return;
}
try {
const parsed = JSON.parse(value);
isInvalid.value = false;
errorMessage.value = '';
emit('update:modelValue', parsed);
} catch (error) {
isInvalid.value = true;
errorMessage.value = 'JSON 解析错误: ' + (error as Error).message;
emit('parse-error', error);
}
}, props.debounceTime);
onMounted(() => {
if (editorContainer.value) {
const extensions = [
basicSetup,
json(),
oneDark,
EditorView.updateListener.of(update => {
if (update.docChanged) {
const value = update.state.doc.toString()
debouncedParse(value)
}
})
]
// Register schema-based autocompletion when a schema is provided
if (Object.keys(props.schema).length > 0) {
extensions.push(
jsonLanguage.data.of({
autocomplete: getJsonCompletion(props.schema)
})
)
}
editorView.value = new EditorView({
doc: JSON.stringify(props.modelValue, null, 2),
extensions,
parent: editorContainer.value
})
}
})
// Build a CodeMirror completion source from the JSON schema
function getJsonCompletion(schema: any) {
return (context: CompletionContext) => {
// Do not trigger completion right after structural characters
const charBefore = context.state.sliceDoc(context.pos - 1, context.pos)
if (/[,.{}[\]:]/.test(charBefore)) return null
const word = context.matchBefore(/\w*/)
if (!word) return null
// Look at the text before the cursor on the current line
const state = context.state
const pos = context.pos
const line = state.doc.lineAt(pos)
const textBefore = line.text.slice(0, pos - line.from)
// Skip completion while inside an unclosed string (odd number of quotes)
const quoteCount = (textBefore.match(/"/g) || []).length
if (quoteCount % 2 !== 0) return null
const completions: Completion[] = []
// Offer each schema property as a completion entry
if (schema.properties) {
Object.entries(schema.properties).forEach(([key, value]) => {
completions.push({
label: key,
type: "property",
apply: `"${key}": ${getDefaultValue(value as any)}`
})
})
}
return {
from: word.from,
options: completions,
validFor: /^\w*$/
}
}
}
// Derive the inserted default value from the property schema
function getDefaultValue(property: any): string {
if (property.default !== undefined) {
return JSON.stringify(property.default)
}
switch (property.type) {
case 'string': return '""'
case 'number': return '0'
case 'boolean': return 'false'
case 'object': return '{}'
case 'array': return '[]'
default: return 'null'
}
}
// Sync external modelValue changes back into the editor (currently disabled)
// watch(
// () => props.modelValue,
// (newVal) => {
// const currentParsed = tryParse(inputValue.value)
// if (!isDeepEqual(currentParsed, newVal)) {
// const newContent = JSON.stringify(newVal, null, 2)
// editorView.value?.dispatch({
// changes: {
// from: 0,
// to: editorView.value.state.doc.length,
// insert: newContent
// }
// })
// }
// },
// { deep: true }
// )
// Safely try to parse JSON; return undefined on failure
const tryParse = (value: string): any => {
try {
return JSON.parse(value)
} catch {
return undefined
}
}
// Deep equality via JSON serialization
const isDeepEqual = (obj1: any, obj2: any): boolean => {
return JSON.stringify(obj1) === JSON.stringify(obj2)
}
</script>
<style scoped>
@ -151,6 +203,7 @@ export default defineComponent({
border-radius: .5em;
margin-bottom: 15px;
display: flex;
flex-direction: column;
}
.k-input-object__textarea {
@ -174,6 +227,24 @@ export default defineComponent({
border-color: var(--el-color-error);
}
.k-input-object__error {
color: var(--el-color-error);
font-size: 12px;
margin-top: 4px;
}
.k-input-object__editor {
width: 100%;
border: 1px solid var(--el-border-color-light);
border-radius: 4px;
overflow: hidden;
background-color: var(--el-bg-color-overlay);
}
.k-input-object__editor.is-invalid {
border-color: var(--el-color-error);
}
.k-input-object__error {
color: var(--el-color-error);
font-size: 12px;

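For orientation (not part of this commit), a minimal sketch of what the schema-driven completion source above produces for a hypothetical schema; the property names come straight from schema.properties and getDefaultValue supplies the inserted value:

// Hypothetical schema, shaped like the `schema` prop defined above.
const schema = {
    type: 'object',
    properties: {
        city: { type: 'string', description: 'target city' },
        count: { type: 'number', default: 3 },
        verbose: { type: 'boolean' }
    },
    required: ['city']
};

// getJsonCompletion(schema) returns a completion handler that offers one entry per property.
// Applying a completion inserts '"city": ""', '"count": 3' or '"verbose": false', because
// getDefaultValue falls back to '""', '0', 'false', '{}', '[]' or 'null' depending on the type.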
View File

@ -14,7 +14,8 @@ export enum MessageState {
ToolCall = 'tool call failed',
None = 'none',
Success = 'success',
ParseJsonError = 'parse json error'
ParseJsonError = 'parse json error',
NoToolFunction = 'no tool function',
}
export interface IExtraInfo {
@ -69,15 +70,7 @@ export interface ChatStorage {
settings: ChatSetting
}
export interface ToolCall {
id?: string;
index?: number;
type: string;
function: {
name: string;
arguments: string;
}
}
export type ToolCall = OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall;
interface PromptTextItem {
type: 'prompt'

View File

@ -4,6 +4,7 @@
<div class="input-wrapper">
<KRichTextarea
:ref="el => editorRef = el"
:tabId="tabId"
v-model="userInput"
:placeholder="t('enter-message-dot')"
@ -43,6 +44,7 @@ const props = defineProps({
});
const emits = defineEmits(['update:scrollToBottom']);
const editorRef = ref<any>(null);
const tab = tabs.content[props.tabId];
const tabStorage = tab.storage as ChatStorage;
@ -84,6 +86,7 @@ function clearErrorMessage(errorMessage: string) {
}
function handleSend(newMessage?: string) {
// Use the provided message, or fall back to the current input
const userMessage = newMessage || userInput.value;
@ -115,8 +118,6 @@ function handleSend(newMessage?: string) {
}
});
}
isLoading.value = false;
});
loop.registerOnChunk(() => {
@ -124,7 +125,6 @@ function handleSend(newMessage?: string) {
});
loop.registerOnDone(() => {
isLoading.value = false;
scrollToBottom();
});
@ -133,9 +133,16 @@ function handleSend(newMessage?: string) {
scrollToBottom();
});
loop.start(tabStorage, userMessage);
loop.start(tabStorage, userMessage).then(() => {
isLoading.value = false;
});
// Clear the input box
userInput.value = '';
const editor = editorRef.value.editor;
if (editor) {
editor.innerHTML = '';
}
}
function handleAbort() {

View File

@ -117,6 +117,12 @@ function extractTextFromCollection(collection: HTMLCollection) {
const isComposing = ref(false);
defineExpose({
editor,
handleBackspace,
handleInput,
});
function handleKeydown(event: KeyboardEvent) {
if (event.key === 'Enter' && !event.shiftKey && !isComposing.value) {

View File

@ -1,16 +1,32 @@
import type { ToolCallContent, ToolCallResponse } from "@/hook/type";
import { MessageState, type ToolCall } from "../chat-box/chat";
import { mcpClientAdapter } from "@/views/connect/core";
import type { BasicLlmDescription } from "@/views/setting/llm";
import { redLog } from "@/views/setting/util";
export interface ToolCallResult {
state: MessageState;
content: ToolCallContent[];
}
export type IToolCallIndex = number;
export async function handleToolCalls(toolCall: ToolCall): Promise<ToolCallResult> {
if (!toolCall.function) {
return {
content: [{
type: 'error',
text: 'no tool function'
}],
state: MessageState.NoToolFunction
}
}
// Deserialize the argument string assembled from streaming
const toolName = toolCall.function.name;
const argsResult = deserializeToolCallResponse(toolCall.function.arguments);
// TODO: check as string
const toolName = toolCall.function.name as string;
const argsResult = deserializeToolCallResponse(toolCall.function.arguments as string);
if (argsResult.error) {
return {
@ -47,8 +63,7 @@ function deserializeToolCallResponse(toolArgs: string) {
function handleToolResponse(toolResponse: ToolCallResponse) {
if (typeof toolResponse === 'string') {
// If it is a string, it is an error message
console.log(toolResponse);
redLog('error happen' + JSON.stringify(toolResponse));
return {
content: [{
@ -84,3 +99,37 @@ function parseErrorObject(error: any): string {
return error.toString();
}
}
function grokIndexAdapter(toolCall: ToolCall, callId2Index: Map<string, number>): IToolCallIndex {
// Grok uses the id as the index, so map each id to a zero-based index
if (!toolCall.id) {
return 0;
}
if (!callId2Index.has(toolCall.id)) {
callId2Index.set(toolCall.id, callId2Index.size);
}
return callId2Index.get(toolCall.id)!;
}
function geminiIndexAdapter(toolCall: ToolCall): IToolCallIndex {
// TODO: to be supported later
return 0;
}
function defaultIndexAdapter(toolCall: ToolCall): IToolCallIndex {
return toolCall.index || 0;
}
export function getToolCallIndexAdapter(llm: BasicLlmDescription) {
if (llm.userModel.startsWith('gemini')) {
return geminiIndexAdapter;
}
if (llm.userModel.startsWith('grok')) {
const callId2Index = new Map<string, number>();
return (toolCall: ToolCall) => grokIndexAdapter(toolCall, callId2Index);
}
return defaultIndexAdapter;
}

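As a rough illustration (hypothetical deltas, not from this commit), the Grok branch of getToolCallIndexAdapter maps call ids to stable zero-based indices even when the provider reports index 0 for every delta:

const adapter = getToolCallIndexAdapter({ userModel: 'grok-3-mini' } as BasicLlmDescription);

// Grok-style deltas: every chunk carries index 0, but the ids distinguish the calls.
adapter({ index: 0, id: 'call_a', type: 'function', function: { name: 'add', arguments: '' } });      // -> 0
adapter({ index: 0, id: 'call_b', type: 'function', function: { name: 'multiply', arguments: '' } }); // -> 1
adapter({ index: 0, id: 'call_a', function: { arguments: '{"a": 1}' } });                             // -> 0 again

// gemini-* models currently pin everything to index 0 (see the TODO above); all other models
// fall back to defaultIndexAdapter, i.e. toolCall.index || 0.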
View File

@ -3,15 +3,16 @@ import { ref, type Ref } from "vue";
import { type ToolCall, type ChatStorage, getToolSchema, MessageState } from "../chat-box/chat";
import { useMessageBridge, MessageBridge, createMessageBridge } from "@/api/message-bridge";
import type { OpenAI } from 'openai';
import { llmManager, llms } from "@/views/setting/llm";
import { llmManager, llms, type BasicLlmDescription } from "@/views/setting/llm";
import { pinkLog, redLog } from "@/views/setting/util";
import { ElMessage } from "element-plus";
import { handleToolCalls, type ToolCallResult } from "./handle-tool-calls";
import { getToolCallIndexAdapter, handleToolCalls, type IToolCallIndex, type ToolCallResult } from "./handle-tool-calls";
import { getPlatform } from "@/api/platform";
import { getSystemPrompt } from "../chat-box/options/system-prompt";
import { mcpSetting } from "@/hook/mcp";
export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string, proxyServer?: string };
export interface TaskLoopOptions {
maxEpochs?: number;
maxJsonParseRetry?: number;
@ -44,7 +45,7 @@ export class TaskLoop {
private onToolCalled: (toolCallResult: ToolCallResult) => ToolCallResult = toolCallResult => toolCallResult;
private onEpoch: () => void = () => {};
private completionUsage: ChatCompletionChunk['usage'] | undefined;
private llmConfig: any;
private llmConfig?: BasicLlmDescription;
constructor(
private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20, maxJsonParseRetry: 3, adapter: undefined },
@ -75,17 +76,23 @@ export class TaskLoop {
}
}
private handleChunkDeltaToolCalls(chunk: ChatCompletionChunk) {
private handleChunkDeltaToolCalls(chunk: ChatCompletionChunk, toolcallIndexAdapter: (toolCall: ToolCall) => IToolCallIndex) {
const toolCall = chunk.choices[0]?.delta?.tool_calls?.[0];
if (toolCall) {
const currentCall = this.streamingToolCalls.value[toolCall.index];
if (toolCall.index === undefined || toolCall.index === null) {
console.warn('tool_call.index is undefined or null');
}
const index = toolcallIndexAdapter(toolCall);
const currentCall = this.streamingToolCalls.value[index];
if (currentCall === undefined) {
// 新的工具调用开始
this.streamingToolCalls.value[toolCall.index] = {
this.streamingToolCalls.value[index] = {
id: toolCall.id,
index: toolCall.index,
index,
type: 'function',
function: {
name: toolCall.function?.name || '',
@ -99,10 +106,10 @@ export class TaskLoop {
currentCall.id = toolCall.id;
}
if (toolCall.function?.name) {
currentCall.function.name = toolCall.function.name;
currentCall.function!.name = toolCall.function.name;
}
if (toolCall.function?.arguments) {
currentCall.function.arguments += toolCall.function.arguments;
currentCall.function!.arguments += toolCall.function.arguments;
}
}
}
@ -117,16 +124,18 @@ export class TaskLoop {
}
}
private doConversation(chatData: ChatCompletionCreateParamsBase) {
private doConversation(chatData: ChatCompletionCreateParamsBase, toolcallIndexAdapter: (toolCall: ToolCall) => IToolCallIndex) {
return new Promise<IDoConversationResult>((resolve, reject) => {
const chunkHandler = this.bridge.addCommandListener('llm/chat/completions/chunk', data => {
// data.code is guaranteed to be 200 here; otherwise this route is not taken
const { chunk } = data.msg as { chunk: ChatCompletionChunk };
console.log(chunk);
// 处理增量的 content 和 tool_calls
this.handleChunkDeltaContent(chunk);
this.handleChunkDeltaToolCalls(chunk);
this.handleChunkDeltaToolCalls(chunk, toolcallIndexAdapter);
this.handleChunkUsage(chunk);
this.onChunk(chunk);
@ -181,6 +190,7 @@ export class TaskLoop {
const temperature = tabStorage.settings.temperature;
const tools = getToolSchema(tabStorage.settings.enableTools);
const parallelToolCalls = tabStorage.settings.parallelToolCalls;
const proxyServer = mcpSetting.proxyServer || '';
const userMessages = [];
@ -211,6 +221,7 @@ export class TaskLoop {
tools,
parallelToolCalls,
messages: userMessages,
proxyServer
} as ChatCompletionCreateParamsBase;
return chatData;
@ -342,9 +353,11 @@ export class TaskLoop {
}
this.currentChatId = chatData.id!;
const llm = this.getLlmConfig();
const toolcallIndexAdapter = getToolCallIndexAdapter(llm);
// Send the request
const doConverationResult = await this.doConversation(chatData);
const doConverationResult = await this.doConversation(chatData, toolcallIndexAdapter);
console.log('[doConverationResult] Response');
console.log(doConverationResult);
@ -395,8 +408,8 @@ export class TaskLoop {
} else if (toolCallResult.state === MessageState.Success) {
tabStorage.messages.push({
role: 'tool',
index: toolCall.index || 0,
tool_call_id: toolCall.id || toolCall.function.name,
index: toolcallIndexAdapter(toolCall),
tool_call_id: toolCall.id || '',
content: toolCallResult.content,
extraInfo: {
created: Date.now(),
@ -409,8 +422,8 @@ export class TaskLoop {
tabStorage.messages.push({
role: 'tool',
index: toolCall.index || 0,
tool_call_id: toolCall.id || toolCall.function.name,
index: toolcallIndexAdapter(toolCall),
tool_call_id: toolCall.id || toolCall.function!.name,
content: toolCallResult.content,
extraInfo: {
created: Date.now(),

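To make the merging concrete, a minimal sketch with hypothetical chunks (shapes follow the ChatCompletionChunk tool-call deltas handled above):

// Three deltas for the same logical tool call, as a provider might stream them.
const deltas: ToolCall[] = [
    { index: 0, id: 'call_1', type: 'function', function: { name: 'add', arguments: '' } },
    { index: 0, function: { arguments: '{"a": 1,' } },
    { index: 0, function: { arguments: ' "b": 2}' } },
];

// handleChunkDeltaToolCalls maps each delta through toolcallIndexAdapter, creates
// streamingToolCalls.value[idx] on the first delta and appends to function.arguments afterwards,
// so once the stream ends:
//   streamingToolCalls.value[0].function.arguments === '{"a": 1, "b": 2}'
// and the assembled call is handed to handleToolCalls(...) for execution.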
View File

@ -20,7 +20,7 @@
<span class="tool-name">
<span class="iconfont icon-tool"></span>
{{ props.message.tool_calls[0].function.name }}
{{ props.message.tool_calls[0].function!.name }}
</span>
<el-button size="small" @click="createTest(props.message.tool_calls[0])">
<span class="iconfont icon-send"></span>
@ -37,7 +37,7 @@
<span class="tool-name">
<span class="iconfont icon-tool"></span>
{{ props.message.tool_calls[toolIndex].function.name }}
{{ props.message.tool_calls[toolIndex].function!.name }}
</span>
<el-button size="small" @click="createTest(props.message.tool_calls[toolIndex])">
<span class="iconfont icon-send"></span>
@ -46,7 +46,7 @@
</div>
<div class="tool-arguments">
<json-render :json="props.message.tool_calls[toolIndex].function.arguments"/>
<json-render :json="props.message.tool_calls[toolIndex].function!.arguments"/>
</div>
<!-- Tool call result -->

View File

@ -16,6 +16,9 @@
<el-button @click="copy">
<span class="iconfont icon-copy"></span>
</el-button>
<el-button @click="reload">
<span class="iconfont icon-restart"></span>
</el-button>
<el-button @click="toggleEdit">
<span class="iconfont icon-edit2"></span>
</el-button>
@ -91,6 +94,16 @@ const copy = async () => {
}
};
const reload = async () => {
const index = tabStorage.messages.findIndex(msg => msg.extraInfo === props.message.extraInfo);
if (index !== -1 && chatContext.handleSend) {
// Remove the message at index and everything after it, then resend
tabStorage.messages.splice(index);
chatContext.handleSend(props.message.content);
}
};
</script>
<style>

View File

@ -60,7 +60,7 @@ export function createTab(type: string, index: number): Tab {
if (customName !== null) {
return customName;
}
return t('blank-test') + ` ${index}`;
return t('blank-test');
},
set name(value: string) {
customName = value; // allow external code to modify name
@ -85,8 +85,6 @@ export function addNewTab() {
export function closeTab(index: number) {
if (tabs.content.length <= 1) return; // keep at least one tab
tabs.content.splice(index, 1);
console.log(tabs.content);
@ -95,4 +93,8 @@ export function closeTab(index: number) {
if (tabs.activeIndex >= index) {
tabs.activeIndex = Math.max(0, tabs.activeIndex - 1);
}
if (tabs.content.length === 0) {
addNewTab();
}
}

View File

@ -38,6 +38,7 @@
<k-input-object
v-else-if="property.type === 'object'"
v-model="tabStorage.formData[name]"
:schema="property"
:placeholder="property.description || t('enter') + ' ' + (property.title || name)"
/>
</el-form-item>
@ -84,22 +85,24 @@ if (!tabStorage.formData) {
tabStorage.formData = {};
}
console.log(tabStorage.formData);
const formRef = ref<FormInstance>();
const loading = ref(false);
const currentTool = computed(() => {
for (const client of mcpClientAdapter.clients) {
const tool = client.tools?.get(tabStorage.currentToolName);
if (tool) return tool;
if (tool) {
console.log(tool);
return tool;
}
}
});
const formRules = computed<FormRules>(() => {
const rules: FormRules = {};
if (!currentTool.value?.inputSchema?.properties) return rules;
Object.entries(currentTool.value.inputSchema.properties).forEach(([name, property]) => {
@ -127,8 +130,6 @@ const initFormData = () => {
const newSchemaDataForm: Record<string, number | boolean | string | object> = {};
console.log(currentTool.value.inputSchema.properties);
Object.entries(currentTool.value.inputSchema.properties).forEach(([name, property]) => {
newSchemaDataForm[name] = getDefaultValue(property);
const originType = normaliseJavascriptType(typeof tabStorage.formData[name]);

View File

@ -176,9 +176,10 @@ export class MacroColor {
// Extra support for Trae's default themes
const sidebarColorString = this.rootStyles.getPropertyValue('--sidebar');
if (sidebarColorString === backgroundColorString) {
// A quirk of Trae's default themes: sidebarColorString equals backgroundColorString,
// so darken the sidebar color a little
const newSidebarColor = this.theme === 'dark' ? '#252a38' : '#edeff2';
document.documentElement.style.setProperty('--sidebar', newSidebarColor);
pinkLog('修改 sidebar 颜色为' + newSidebarColor);
document.documentElement.style.setProperty('--sidebar', 'var(--vscode-icube-colorBg2)');
}
}

View File

@ -27,10 +27,11 @@ export function normaliseJavascriptType(type: string) {
case 'string':
return 'string';
default:
return 'string';
return type;
}
}
export const mcpSetting = reactive({
timeout: 60,
proxyServer: '',
});

View File

@ -20,6 +20,7 @@ export async function loadSetting() {
llmManager.currentModelIndex = persistConfig.MODEL_INDEX || 0;
I18n.global.locale.value = persistConfig.LANG || 'zh';
mcpSetting.timeout = persistConfig.MCP_TIMEOUT_SEC || 60;
mcpSetting.proxyServer = persistConfig.PROXY_SERVER || '';
persistConfig.LLM_INFO.forEach((element: any) => {
llms.push(element);
@ -51,7 +52,8 @@ export function saveSetting(saveHandler?: () => void) {
MODEL_INDEX: llmManager.currentModelIndex,
LLM_INFO: JSON.parse(JSON.stringify(llms)),
LANG: I18n.global.locale.value,
MCP_TIMEOUT_SEC: mcpSetting.timeout
MCP_TIMEOUT_SEC: mcpSetting.timeout,
PROXY_SERVER: mcpSetting.proxyServer
};
bridge.addCommandListener('setting/save', data => {

View File

@ -10,6 +10,7 @@ export interface InputSchema {
properties: Record<string, SchemaProperty>;
required?: string[];
title?: string;
$defs?: any;
}
export interface Argument {
@ -40,6 +41,7 @@ export interface ToolItem {
name: string;
description: string;
inputSchema: InputSchema;
anyOf?: any;
}
export interface ToolsListResponse {
tools: ToolItem[]

View File

@ -156,5 +156,7 @@
"error": "خطأ",
"feedback": "تعليقات",
"waiting-mcp-server": "في انتظار استجابة خادم MCP",
"parallel-tool-calls": "السماح للنموذج باستدعاء أدوات متعددة في رد واحد"
"parallel-tool-calls": "السماح للنموذج باستدعاء أدوات متعددة في رد واحد",
"proxy-server": "خادم وكيل",
"update-model-list": "تحديث قائمة النماذج"
}

View File

@ -156,5 +156,7 @@
"error": "Fehler",
"feedback": "Feedback",
"waiting-mcp-server": "Warten auf Antwort vom MCP-Server",
"parallel-tool-calls": "Erlauben Sie dem Modell, mehrere Tools in einer einzigen Antwort aufzurufen"
"parallel-tool-calls": "Erlauben Sie dem Modell, mehrere Tools in einer einzigen Antwort aufzurufen",
"proxy-server": "Proxy-Server",
"update-model-list": "Modellliste aktualisieren"
}

View File

@ -156,5 +156,7 @@
"error": "Error",
"feedback": "Feedback",
"waiting-mcp-server": "Waiting for MCP server response",
"parallel-tool-calls": "Allow the model to call multiple tools in a single reply"
"parallel-tool-calls": "Allow the model to call multiple tools in a single reply",
"proxy-server": "Proxy server",
"update-model-list": "Update model list"
}

View File

@ -156,5 +156,7 @@
"error": "Erreur",
"feedback": "Retour",
"waiting-mcp-server": "En attente de la réponse du serveur MCP",
"parallel-tool-calls": "Permettre au modèle d'appeler plusieurs outils en une seule réponse"
"parallel-tool-calls": "Permettre au modèle d'appeler plusieurs outils en une seule réponse",
"proxy-server": "Serveur proxy",
"update-model-list": "Mettre à jour la liste des modèles"
}

View File

@ -156,5 +156,7 @@
"error": "エラー",
"feedback": "フィードバック",
"waiting-mcp-server": "MCPサーバーの応答を待機中",
"parallel-tool-calls": "モデルが単一の返信で複数のツールを呼び出すことを許可する"
"parallel-tool-calls": "モデルが単一の返信で複数のツールを呼び出すことを許可する",
"proxy-server": "プロキシサーバー",
"update-model-list": "モデルリストを更新"
}

View File

@ -156,5 +156,7 @@
"error": "오류",
"feedback": "피드백",
"waiting-mcp-server": "MCP 서버 응답 대기 중",
"parallel-tool-calls": "모델이 단일 응답에서 여러 도구를 호출할 수 있도록 허용"
"parallel-tool-calls": "모델이 단일 응답에서 여러 도구를 호출할 수 있도록 허용",
"proxy-server": "프록시 서버",
"update-model-list": "모델 목록 업데이트"
}

View File

@ -156,5 +156,7 @@
"error": "Ошибка",
"feedback": "Обратная связь",
"waiting-mcp-server": "Ожидание ответа от сервера MCP",
"parallel-tool-calls": "Разрешить модели вызывать несколько инструментов в одном ответе"
"parallel-tool-calls": "Разрешить модели вызывать несколько инструментов в одном ответе",
"proxy-server": "Прокси-сервер",
"update-model-list": "Обновить список моделей"
}

View File

@ -156,5 +156,7 @@
"error": "错误",
"feedback": "反馈",
"waiting-mcp-server": "等待 MCP 服务器响应",
"parallel-tool-calls": "允许模型在单轮回复中调用多个工具"
"parallel-tool-calls": "允许模型在单轮回复中调用多个工具",
"proxy-server": "代理服务器",
"update-model-list": "更新模型列表"
}

View File

@ -156,5 +156,7 @@
"error": "錯誤",
"feedback": "反饋",
"waiting-mcp-server": "等待MCP伺服器響應",
"parallel-tool-calls": "允許模型在單輪回覆中調用多個工具"
"parallel-tool-calls": "允許模型在單輪回覆中調用多個工具",
"proxy-server": "代理伺服器",
"update-model-list": "更新模型列表"
}

View File

@ -6,7 +6,7 @@
</span>
<p>
OpenMCP Client 0.1.1 OpenMCP@<a href="https://www.zhihu.com/people/can-meng-zhong-de-che-xian">锦恢</a> 开发
OpenMCP Client 0.1.4 OpenMCP@<a href="https://www.zhihu.com/people/can-meng-zhong-de-che-xian">锦恢</a> 开发
</p>
<p>

View File

@ -121,4 +121,9 @@ const validateForm = async () => {
border-radius: 4px;
margin-bottom: 16px;
}
.connection-option .el-form-item {
margin-bottom: 0;
}
</style>

View File

@ -53,7 +53,7 @@ function clearLogs() {
<style>
.connection-log {
height: 90vh;
height: 100%;
display: flex;
flex-direction: column;
gap: 12px;

View File

@ -1,5 +1,5 @@
<template>
<el-scrollbar>
<el-scrollbar height="98%">
<div class="connection-container" @dragover.prevent="handleDragOver" @drop.prevent="handleDrop">
<div v-if="isDraging" class="drag-mask">
<span class="iconfont icon-connect"></span>
@ -24,7 +24,6 @@
</div>
</div>
</el-scrollbar>
</template>
<script setup lang="ts">
@ -126,6 +125,7 @@ function handleDrop(event: DragEvent) {
<style>
.connection-container {
display: flex;
max-height: 85vh;
}
@ -133,7 +133,6 @@ function handleDrop(event: DragEvent) {
display: flex;
flex-direction: column;
width: 45%;
max-height: 85vh;
max-width: 500px;
min-width: 350px;
padding: 5px 20px;
@ -143,7 +142,6 @@ function handleDrop(event: DragEvent) {
display: flex;
flex-direction: column;
width: 55%;
max-height: 85vh;
min-width: 450px;
padding: 5px 20px;
}

View File

@ -30,6 +30,66 @@ function prettifyMapKeys(keys: MapIterator<string>) {
return result.join('\n');
}
function _processSchemaNode(node: any, defs: Record<string, any> = {}): any {
// Handle $ref references
if ('$ref' in node) {
const refPath = node['$ref'];
if (refPath.startsWith('#/$defs/')) {
const refName = refPath.split('/').pop();
if (refName && refName in defs) {
// Process the referenced definition
return _processSchemaNode(defs[refName], defs);
}
}
}
// Start with a new schema object
const result: Record<string, any> = {};
// Copy the basic properties
if ('type' in node) {
result.type = node.type;
}
// Handle anyOf (often used for optional fields with None)
if ('anyOf' in node) {
const nonNullTypes = node.anyOf.filter((t: any) => t?.type !== 'null');
if (nonNullTypes.length > 0) {
// Process the first non-null type
const processed = _processSchemaNode(nonNullTypes[0], defs);
Object.assign(result, processed);
}
}
// Handle description
if ('description' in node) {
result.description = node.description;
}
// Handle object properties recursively
if (node?.type === 'object' && 'properties' in node) {
result.type = 'object';
result.properties = {};
// Process each property
for (const [propName, propSchema] of Object.entries(node.properties)) {
result.properties[propName] = _processSchemaNode(propSchema as any, defs);
}
// Add required fields if present
if ('required' in node) {
result.required = node.required;
}
}
// Handle arrays
if (node?.type === 'array' && 'items' in node) {
result.type = 'array';
result.items = _processSchemaNode(node.items, defs);
}
return result;
}
export class McpClient {
// Connection input parameters
@ -139,6 +199,11 @@ export class McpClient {
this.tools = new Map<string, ToolItem>();
msg.tools.forEach(tool => {
const standardSchema = _processSchemaNode(tool.inputSchema, tool.inputSchema.$defs || {});
console.log(standardSchema);
tool.inputSchema = standardSchema;
this.tools!.set(tool.name, tool);
});

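As an illustration (hypothetical pydantic-style schema, not taken from this commit), _processSchemaNode resolves $defs references and collapses anyOf-with-null into the plain nested schema the tool form expects:

// Input schema roughly as a pydantic model would emit it.
const raw = {
    type: 'object',
    properties: {
        name: { anyOf: [{ type: 'string' }, { type: 'null' }], description: 'optional name' },
        address: { $ref: '#/$defs/Address' }
    },
    required: ['address'],
    $defs: {
        Address: { type: 'object', properties: { city: { type: 'string' } }, required: ['city'] }
    }
};

const flattened = _processSchemaNode(raw, raw.$defs);
// flattened is equivalent to:
// {
//   type: 'object',
//   properties: {
//     name: { type: 'string', description: 'optional name' },
//     address: { type: 'object', properties: { city: { type: 'string' } }, required: ['city'] }
//   },
//   required: ['address']
// }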
View File

@ -104,18 +104,15 @@ function deleteServer(index: number) {
display: flex;
align-items: center;
width: 150px;
height: 50px;
border-right: 1px solid var(--border-color);
padding: 15px 25px;
padding: 0 25px;
}
.server-name {
font-size: 15px;
font-size: 12px;
}
.server-item {
padding: 10px;
margin-bottom: 5px;
cursor: pointer;
border-radius: 4px;
display: flex;

View File

@ -51,10 +51,21 @@
<div class="setting-save-container">
<el-button
id="add-new-server-button"
type="success" @click="addNewServer">
type="success"
@click="addNewServer"
>
{{ t("add-new-server") }}
</el-button>
<el-button
id="add-new-server-button"
type="success"
@click="updateModels"
:loading="updateModelLoading"
>
{{ t('update-model-list') }}
</el-button>
<el-button
type="primary"
id="test-llm-button"
@ -120,6 +131,7 @@ import { pinkLog } from './util';
import ConnectInterfaceOpenai from './connect-interface-openai.vue';
import ConnectTest from './connect-test.vue';
import { llmSettingRef, makeSimpleTalk, simpleTestResult } from './api';
import { useMessageBridge } from '@/api/message-bridge';
defineComponent({ name: 'api' });
const { t } = useI18n();
@ -212,6 +224,43 @@ function addNewProvider() {
};
}
const updateModelLoading = ref(false);
async function updateModels() {
updateModelLoading.value = true;
const llm = llms[llmManager.currentModelIndex];
const apiKey = llm.userToken;
const baseURL = llm.baseUrl;
const bridge = useMessageBridge();
const { code, msg } = await bridge.commandRequest('llm/models', {
apiKey,
baseURL
});
const isGemini = baseURL.includes('googleapis');
if (code === 200 && Array.isArray(msg)) {
const models = msg
.filter(item => item.object === 'model')
.map(item => {
let modelName = item.id as string;
if (isGemini && modelName.includes('/')) {
modelName = modelName.split('/')[1];
}
return modelName;
});
llm.models = models;
saveLlmSetting();
} else {
ElMessage.error('模型列表更新失败' + msg);
}
updateModelLoading.value = false;
}
function updateProvider() {
if (editingIndex.value < 0) {
return;

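A small worked example (hypothetical response items) of the Gemini handling above, which is the fix noted in the 0.1.5 changelog: Gemini's OpenAI-compatible /models endpoint reports ids such as models/gemini-2.0-flash, and the mapping strips that prefix before saving the list:

const msg: Array<{ object: string; id: string }> = [
    { object: 'model', id: 'models/gemini-2.0-flash' },
    { object: 'model', id: 'models/gemini-2.5-pro-preview-05-06' },
];
const isGemini = true;

const models = msg
    .filter(item => item.object === 'model')
    .map(item => {
        let modelName = item.id;
        if (isGemini && modelName.includes('/')) {
            modelName = modelName.split('/')[1]; // drop the 'models/' prefix
        }
        return modelName;
    });
// models === ['gemini-2.0-flash', 'gemini-2.5-pro-preview-05-06']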
View File

@ -14,7 +14,7 @@
v-for="option in llms[llmManager.currentModelIndex].models"
:value="option"
:label="option"
:key="option.id"
:key="option"
></el-option>
</el-select>
</div>

View File

@ -1,4 +1,7 @@
<template>
<div class="extra-info warning" v-if="isGoogle">
当前模型组协议兼容性较差特别是 gemini-2.0-flash 模型的函数调用能力不稳定如果想要稳定使用 gemini 的服务请尽可能使用最新的模型或者使用 newApi 进行协议转接
</div>
<div class="connect-test" v-if="simpleTestResult.done || simpleTestResult.error">
<div class="test-result">
<div class="result-item" v-if="simpleTestResult.done">
@ -18,9 +21,19 @@
<script setup lang="ts">
import { useI18n } from 'vue-i18n';
import { simpleTestResult } from './api';
import { llmManager, llms } from './llm';
import { computed } from '@vue/reactivity';
const { t } = useI18n();
const isGoogle = computed(() => {
const model = llms[llmManager.currentModelIndex];
return model.userModel.startsWith('gemini') || model.baseUrl.includes('googleapis');
});
console.log(llms[llmManager.currentModelIndex]);
</script>
<style scoped>
@ -61,4 +74,12 @@ const { t } = useI18n();
.result-item .iconfont {
font-size: 16px;
}
.extra-info.warning {
background-color: rgba(230, 162, 60, 0.5);
padding: 10px;
border-radius: 4px;
margin-top: 15px;
margin-bottom: 10px;
}
</style>

View File

@ -27,6 +27,20 @@
@change="safeSaveSetting" />
</div>
</div>
<div class="setting-option">
<span>
<span class="iconfont icon-proxy"></span>
<span class="option-title">{{ t('proxy-server') }}</span>
</span>
<div style="width: 200px;">
<el-input
v-model="mcpSetting.proxyServer"
:placeholder="'http://localhost:7890'"
@input="safeSaveSetting"
/>
</div>
</div>
</div>
</template>

View File

@ -6,12 +6,25 @@ import type { ToolCall } from '@/components/main-panel/chat/chat-box/chat';
import I18n from '@/i18n';
const { t } = I18n.global;
export const llms = reactive<any[]>([]);
export const llms = reactive<BasicLlmDescription[]>([]);
export const llmManager = reactive({
currentModelIndex: 0,
});
export interface BasicLlmDescription {
id: string,
name: string,
baseUrl: string,
models: string[],
isOpenAICompatible: boolean,
description: string,
website: string,
userToken: string,
userModel: string,
[key: string]: any
}
export function createTest(call: ToolCall) {
const tab = createTab('tool', 0);
tab.componentIndex = 2;
@ -21,8 +34,8 @@ export function createTest(call: ToolCall) {
const storage: ToolStorage = {
activeNames: [0],
currentToolName: call.function.name,
formData: JSON.parse(call.function.arguments)
currentToolName: call.function!.name!,
formData: JSON.parse(call.function!.arguments!)
};
tab.storage = storage;

View File

@ -35,10 +35,11 @@
"webpack-node-externals": "^3.0.0"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"@seald-io/nedb": "^4.1.1",
"open": "^10.1.2",
"openai": "^4.96.0",
"axios": "^1.9.0",
"openai": "^5.0.1",
"pako": "^2.1.0",
"pino": "^9.6.0",
"pino-pretty": "^13.0.0",

View File

@ -0,0 +1,211 @@
import axios from "axios";
import { HttpsProxyAgent } from 'https-proxy-agent';
interface FetchOptions {
method?: string;
headers?: Record<string, string>;
body?: string | Buffer | FormData | URLSearchParams | object;
[key: string]: any;
}
interface FetchResponse {
ok: boolean;
status: number;
statusText: string;
headers: Headers;
url: string;
redirected: boolean;
type: string;
body: any;
json(): Promise<any>;
text(): Promise<string>;
arrayBuffer(): Promise<ArrayBuffer>;
getReader(): ReadableStreamDefaultReader;
}
interface ReadableStreamDefaultReader {
read(): Promise<{ done: boolean, value?: any }>;
cancel(): Promise<void>;
releaseLock(): void;
get closed(): boolean;
}
/**
* Adapt fetch-style request options into an axios request config
*/
function adaptRequestOptions(url: string, options: FetchOptions = {}): any {
const axiosConfig: any = {
url,
method: options.method || 'GET',
headers: options.headers,
responseType: 'stream'
};
// Convert the fetch body into axios data
if (options.body) {
if (typeof options.body === 'string' || Buffer.isBuffer(options.body)) {
axiosConfig.data = options.body;
} else if (typeof options.body === 'object') {
// Special types such as FormData and URLSearchParams need dedicated handling
if (options.body instanceof FormData) {
axiosConfig.data = options.body;
axiosConfig.headers = {
...axiosConfig.headers,
'Content-Type': 'multipart/form-data'
};
} else if (options.body instanceof URLSearchParams) {
axiosConfig.data = options.body.toString();
axiosConfig.headers = {
...axiosConfig.headers,
'Content-Type': 'application/x-www-form-urlencoded'
};
} else {
// Plain JSON object
axiosConfig.data = JSON.stringify(options.body);
axiosConfig.headers = {
...axiosConfig.headers,
'Content-Type': 'application/json'
};
}
}
}
return axiosConfig;
}
/**
 * Adapt an axios response into a fetch-style Response object.
 */
function adaptResponse(axiosResponse: FetchOptions): FetchResponse {
// Build a Headers object
const headers = new Headers();
Object.entries(axiosResponse.headers || {}).forEach(([key, value]) => {
headers.append(key, value);
});
// Build a Response object that matches the Fetch API shape
const fetchResponse = {
ok: axiosResponse.status >= 200 && axiosResponse.status < 300,
status: axiosResponse.status,
statusText: axiosResponse.statusText,
headers: headers,
url: axiosResponse.config.url,
redirected: false, // axios does not expose this directly
type: 'basic', // simple response type
body: null,
// Standard methods
json: async () => {
if (typeof axiosResponse.data === 'object') {
return axiosResponse.data;
}
throw new Error('Response is not JSON');
},
text: async () => {
if (typeof axiosResponse.data === 'string') {
return axiosResponse.data;
}
return JSON.stringify(axiosResponse.data);
},
arrayBuffer: async () => {
throw new Error('arrayBuffer not implemented for streaming');
},
// Streaming support
getReader: () => {
if (!axiosResponse.data.on || typeof axiosResponse.data.on !== 'function') {
throw new Error('Not a stream response');
}
// Expose the Node.js stream through a Web Streams style reader
const nodeStream = axiosResponse.data;
let isCancelled = false;
return {
read: () => {
if (isCancelled) {
return Promise.resolve({ done: true });
}
return new Promise((resolve, reject) => {
const onData = (chunk: any) => {
cleanup();
resolve({ done: false, value: chunk });
};
const onEnd = () => {
cleanup();
resolve({ done: true });
};
const onError = (err: Error) => {
cleanup();
reject(err);
};
const cleanup = () => {
nodeStream.off('data', onData);
nodeStream.off('end', onEnd);
nodeStream.off('error', onError);
};
nodeStream.once('data', onData);
nodeStream.once('end', onEnd);
nodeStream.once('error', onError);
});
},
cancel: () => {
isCancelled = true;
nodeStream.destroy();
return Promise.resolve();
},
releaseLock: () => {
// TODO: implement releaseLock
},
get closed() {
return isCancelled;
}
};
}
} as FetchResponse;
// Expose body as a readable stream
if (axiosResponse.data.on && typeof axiosResponse.data.on === 'function') {
fetchResponse.body = {
getReader: fetchResponse.getReader
};
}
return fetchResponse;
}
/**
 * @description fetch-compatible wrapper around axios, with optional proxy support.
 */
export async function axiosFetch(input: any, init: any, requestOption: { proxyServer?: string } = {}): Promise<any> {
const axiosConfig = adaptRequestOptions(input, init);
const {
proxyServer = ''
} = requestOption;
if (proxyServer) {
const proxyAgent = new HttpsProxyAgent(proxyServer);
axiosConfig.httpsAgent = proxyAgent;
axiosConfig.httpAgent = proxyAgent;
}
try {
const response = await axios(axiosConfig) as FetchOptions;
return adaptResponse(response);
} catch (error: any) {
if (error.response) {
return adaptResponse(error.response);
}
throw error;
}
}
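
A usage sketch for the adapter above, assuming a streaming endpoint; the URL, proxy address, and function name below are placeholders, not values from this commit.

import { axiosFetch } from './axios-fetch.js';

// Fetch a streaming response through the adapter and collect it into a string.
async function readAll(url: string): Promise<string> {
    const res = await axiosFetch(url, { method: 'GET' }, { proxyServer: 'http://localhost:7890' });
    if (!res.ok) {
        throw new Error(`HTTP ${res.status} ${res.statusText}`);
    }
    const reader = res.getReader();
    const chunks: Buffer[] = [];
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        chunks.push(Buffer.from(value));
    }
    return Buffer.concat(chunks).toString('utf-8');
}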

View File

@ -47,6 +47,30 @@ export const llms = [
userToken: '',
userModel: 'doubao-1.5-pro-32k'
},
{
id: 'gemini',
name: 'Gemini',
baseUrl: 'https://generativelanguage.googleapis.com/v1beta/openai/',
models: ['gemini-2.0-flash', 'gemini-2.5-flash-preview-05-20', 'gemini-2.5-pro-preview-05-06'],
provider: 'google',
isOpenAICompatible: true,
description: 'Google Gemini',
website: 'https://ai.google.dev/gemini-api/docs/models?hl=zh-cn%2F%2Fgemini-2.5-pro-preview-05-06#gemini-2.5-pro-preview-05-06',
userToken: '',
userModel: 'gemini-2.0-flash'
},
{
id: 'grok',
name: 'Grok',
baseUrl: 'https://api.x.ai/v1',
models: ['grok-3', 'grok-3-fast', 'grok-3-mini', 'grok-3-mini-fast'],
provider: 'xai',
isOpenAICompatible: true,
description: 'xAI Grok',
website: 'https://docs.x.ai/docs/models',
userToken: '',
userModel: 'grok-3-mini'
},
{
id: 'mistral',
name: 'Mistral',
@ -108,3 +132,5 @@ export const llms = [
userModel: 'moonshot-v1-8k'
}
];

View File

@ -4,7 +4,7 @@ import { RequestData } from "../common/index.dto.js";
import { PostMessageble } from "../hook/adapter.js";
import { getClient } from "../mcp/connect.service.js";
import { abortMessageService, streamingChatCompletion } from "./llm.service.js";
import { OpenAI } from "openai";
export class LlmController {
@Controller('llm/chat/completions')
@ -35,4 +35,20 @@ export class LlmController {
return abortMessageService(data, webview);
}
@Controller('llm/models')
async getModels(data: RequestData, webview: PostMessageble) {
const {
baseURL,
apiKey,
} = data;
const client = new OpenAI({ apiKey, baseURL });
const models = await client.models.list();
return {
code: 200,
msg: models.data
}
}
}

View File

@ -7,3 +7,20 @@ export type MyMessageType = OpenAI.Chat.ChatCompletionMessageParam & {
export type MyToolMessageType = OpenAI.Chat.ChatCompletionToolMessageParam & {
extraInfo?: any;
}
export interface OpenMcpChatOption {
baseURL: string;
apiKey: string;
model: string;
messages: any[];
temperature?: number;
tools?: any[];
parallelToolCalls?: boolean;
}
export interface MyStream<T> extends AsyncIterable<T> {
[Symbol.asyncIterator](): AsyncIterator<T>;
controller: {
abort(): void;
};
}
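
A consumption sketch for MyStream, assuming its controller mirrors the OpenAI SDK's abortable stream handle; the function name and stop condition are illustrative only.

// Iterate a MyStream and abort the underlying request when asked to stop.
async function consumeStream<T>(stream: MyStream<T>, shouldStop: () => boolean): Promise<void> {
    for await (const item of stream) {
        if (shouldStop()) {
            stream.controller.abort();
            break;
        }
        // handle each streamed item here (elided)
    }
}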

View File

@ -5,6 +5,7 @@ import { RestfulResponse } from "../common/index.dto.js";
import { ocrDB } from "../hook/db.js";
import type { ToolCallContent } from "../mcp/client.dto.js";
import { ocrWorkerStorage } from "../mcp/ocr.service.js";
import { axiosFetch } from "../hook/axios-fetch.js";
export let currentStream: AsyncIterable<any> | null = null;
@ -12,33 +13,51 @@ export async function streamingChatCompletion(
data: any,
webview: PostMessageble
) {
let {
baseURL,
apiKey,
model,
messages,
temperature,
tools = [],
parallelToolCalls = true
} = data;
const {
baseURL,
apiKey,
model,
messages,
temperature,
tools = [],
parallelToolCalls = true,
proxyServer = ''
} = data;
const client = new OpenAI({
baseURL,
apiKey
apiKey,
fetch: async (input: string | URL | Request, init?: RequestInit) => {
console.log('openai fetch begin, proxyServer:', proxyServer);
if (model.startsWith('gemini') && init) {
// damn Google: Gemini's OpenAI-compatible endpoint needs these headers set explicitly
init.headers = {
'Content-Type': 'application/json',
'Authorization': `Bearer ${apiKey}`
}
}
return await axiosFetch(input, init, { proxyServer });
}
});
if (tools.length === 0) {
tools = undefined;
}
const seriableTools = (tools.length === 0) ? undefined: tools;
const seriableParallelToolCalls = (tools.length === 0)?
undefined: model.startsWith('gemini') ? undefined : parallelToolCalls;
await postProcessMessages(messages);
console.log('seriableTools', seriableTools);
console.log('seriableParallelToolCalls', seriableParallelToolCalls);
const stream = await client.chat.completions.create({
model,
messages,
temperature,
tools,
parallel_tool_calls: parallelToolCalls,
tools: seriableTools,
parallel_tool_calls: seriableParallelToolCalls,
stream: true
});
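
Downstream of the create call (not shown in this hunk), the stream would be consumed chunk by chunk; a minimal sketch, assuming the standard OpenAI streaming chunk shape, with the onText callback standing in for whatever forwards deltas to the webview.

async function pumpStream(stream: AsyncIterable<any>, onText: (text: string) => void): Promise<void> {
    for await (const chunk of stream) {
        // Each chunk carries an incremental delta in the OpenAI streaming format.
        const delta = chunk?.choices?.[0]?.delta;
        if (delta?.content) {
            onText(delta.content);
        }
    }
}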

View File

@ -7,6 +7,7 @@ import { VSCodeWebViewLike } from './hook/adapter.js';
import path from 'node:path';
import * as fs from 'node:fs';
import { setRunningCWD } from './hook/setting.js';
import axios from 'axios';
export interface VSCodeMessage {
command: string;

View File

@ -166,11 +166,7 @@ export class McpClient {
// Invoke a tool
public async callTool(options: { name: string; arguments: Record<string, any>, callToolOption?: any }) {
const { callToolOption, ...methodArgs } = options;
console.log('methodArgs', methodArgs);
console.log('callToolOption', callToolOption);
const res = await this.client.callTool(methodArgs, undefined, callToolOption);
console.log('callTool res', res);
return res;
}
}

View File

@ -15,9 +15,6 @@ export class ConnectController {
async lookupEnvVar(data: RequestData, webview: PostMessageble) {
const { keys } = data;
const values = keys.map((key: string) => {
// TODO: test on Windows
console.log(key);
console.log(process.env);
if (process.platform === 'win32') {
switch (key) {

View File

@ -79,7 +79,6 @@ export class PanelController {
@Controller('system-prompts/load')
async loadSystemPrompts(data: RequestData, webview: PostMessageble) {
const client = getClient(data.clientId);
const queryPrompts = await systemPromptDB.findAll();
const prompts = [];
for (const prompt of queryPrompts) {

View File

@ -52,7 +52,11 @@ export function loadSetting(): IConfig {
try {
const configData = fs.readFileSync(configPath, 'utf-8');
return JSON.parse(configData) as IConfig;
const config = JSON.parse(configData) as IConfig;
if (!config.LLM_INFO || (Array.isArray(config.LLM_INFO) && config.LLM_INFO.length === 0)) {
config.LLM_INFO = llms;
}
return config;
} catch (error) {
console.error('Error loading config file, creating new one:', error);
return createConfig();

View File

@ -68,6 +68,19 @@ export function getConnectionConfig() {
let connection;
try {
connection = JSON.parse(rawConnectionString) as IConnectionConfig;
// Validate the connection info
if (!connection.items) {
connection = { items: [] };
}
connection.items = connection.items.filter(item => {
if (Array.isArray(item) && item.length === 0) {
return false;
}
return true;
});
} catch (error) {
connection = { items: [] };
}
@ -110,6 +123,19 @@ export function getWorkspaceConnectionConfig() {
let connection;
try {
connection = JSON.parse(rawConnectionString) as IConnectionConfig;
// Validate the connection info
if (!connection.items) {
connection = { items: [] };
}
connection.items = connection.items.filter(item => {
if (Array.isArray(item) && item.length === 0) {
return false;
}
return true;
});
} catch (error) {
connection = { items: [] };
}
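
The same items validation now appears in both config loaders above; a shared helper could be sketched as follows (the name is illustrative, not part of this commit).

// Drop empty array entries and guarantee an items array, as both loaders do inline.
function sanitizeConnectionItems(connection: { items?: any[] } | undefined): { items: any[] } {
    const items = (connection?.items ?? []).filter(item => !(Array.isArray(item) && item.length === 0));
    return { items };
}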
@ -169,7 +195,7 @@ export function saveWorkspaceConnectionConfig(workspace: string) {
const workspacePath = getWorkspacePath();
for (let item of connectionConfig.items) {
const connections = Array.isArray(item)? item : [item];
const connections = Array.isArray(item) ? item : [item];
for (let connection of connections) {
const connectionType = (connection.type || connection.connectionType).toUpperCase() as ConnectionType;
connection.type = undefined;
@ -283,7 +309,7 @@ export function getWorkspaceConnectionConfigItemByPath(absPath: string) {
const normaliseAbsPath = absPath.replace(/\\/g, '/');
for (let item of workspaceConnectionConfig.items) {
const nItem = Array.isArray(item)? item[0] : item;
const nItem = Array.isArray(item) ? item[0] : item;
const filePath = normaliseConnectionFilePath(nItem, workspacePath);
if (filePath === normaliseAbsPath) {
@ -303,7 +329,7 @@ export function getInstalledConnectionConfigItemByPath(absPath: string) {
const normaliseAbsPath = absPath.replace(/\\/g, '/');
for (let item of installedConnectionConfig.items) {
const nItem = Array.isArray(item)? item[0] : item;
const nItem = Array.isArray(item) ? item[0] : item;
const filePath = (nItem.filePath || '').replace(/\\/g, '/');
if (filePath === normaliseAbsPath) {

View File

@ -58,7 +58,8 @@ export async function acquireInstalledConnection(): Promise<McpOptions[]> {
// Let the user choose a connection type
const connectionType = await vscode.window.showQuickPick(['STDIO', 'SSE', 'STREAMABLE_HTTP'], {
placeHolder: '请选择连接类型',
canPickMany: false
canPickMany: false,
ignoreFocusOut: true,
});
if (!connectionType) {

View File

@ -63,7 +63,7 @@ export class McpWorkspaceConnectProvider implements vscode.TreeDataProvider<Conn
const item = await acquireUserCustomConnection();
if (!item) {
if (item.length === 0) {
return;
}

View File

@ -59,8 +59,10 @@ export async function validateAndGetCommandPath(command: string, cwd?: string):
export async function acquireUserCustomConnection(): Promise<McpOptions[]> {
// Let the user choose a connection type
const connectionType = await vscode.window.showQuickPick(['STDIO', 'SSE'], {
placeHolder: '请选择连接类型'
const connectionType = await vscode.window.showQuickPick(['STDIO', 'SSE', 'STREAMABLE_HTTP'], {
placeHolder: '请选择连接类型',
canPickMany: false,
ignoreFocusOut: true,
});
if (!connectionType) {