Verify task-loop

锦恢 2025-05-10 20:10:23 +08:00
parent 1fef3a1150
commit e20bbf2b42
14 changed files with 159 additions and 429 deletions

View File

@ -30,4 +30,9 @@ New-Item -ItemType Directory -Path ./software/openmcp-sdk -Force
Remove-Item -Recurse -Force ./software/openmcp-sdk/* -ErrorAction SilentlyContinue
Copy-Item -Recurse -Path ./openmcp-sdk -Destination ./software/ -Force
$serviceJob = Start-Job -ScriptBlock {
param($workDir)
npm run build:task-loop
} -ArgumentList $currentDir
Write-Output "finish building services in ./openmcp-sdk"

View File

@ -13,4 +13,6 @@ mkdir -p ./software/openmcp-sdk
rm -rf ./software/openmcp-sdk
cp -r ./openmcp-sdk ./software/
npm run build:task-loop
echo "finish building services in ./openmcp-sdk"

View File

@ -1,6 +1,5 @@
import { pinkLog, redLog } from '@/views/setting/util';
import { acquireVsCodeApi, electronApi, getPlatform } from './platform';
import { ref } from 'vue';
export interface VSCodeMessage {
command: string;
@ -20,7 +19,7 @@ interface AddCommandListenerOption {
once: boolean // call only once, then destroy the listener
}
class MessageBridge {
export class MessageBridge {
private ws: WebSocket | null = null;
private handlers = new Map<string, Set<CommandHandler>>();
private isConnected: Promise<boolean> | null = null;
@ -44,6 +43,11 @@ class MessageBridge {
pinkLog('当前模式: electron');
break;
case 'nodejs':
this.setupNodejsListener();
pinkLog('当前模式: nodejs');
break;
case 'web':
this.setupWebSocket();
pinkLog('当前模式: web');
@ -114,6 +118,17 @@ class MessageBridge {
};
}
private setupNodejsListener() {
const EventEmitter = require('events');
const eventEmitter = new EventEmitter();
this.postMessage = (message) => {
eventEmitter.emit('server', message);
};
}
/**
* @description message command
* @param message
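
A minimal sketch of driving the bridge from a Node.js host, assuming the now-exported MessageBridge is still obtained through the existing useMessageBridge() helper. Note that setupNodejsListener above only wires postMessage onto a local EventEmitter 'server' channel, so a real host would still need to subscribe on that channel:

import { useMessageBridge } from '@/api/message-bridge';

const bridge = useMessageBridge();

// Stream handler; addCommandListener returns a disposer, the same pattern TaskLoop uses below.
const removeChunkListener = bridge.addCommandListener('llm/chat/completions/chunk', data => {
    console.log('chunk', data.msg);
}, { once: false });

// Requests are posted exactly the way the webview posts them.
bridge.postMessage({
    command: 'llm/chat/completions',
    data: { /* ChatCompletionCreateParams payload */ },
});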

View File

@ -1,14 +1,16 @@
export type OpenMcpSupportPlatform = 'web' | 'vscode' | 'electron';
export type OpenMcpSupportPlatform = 'web' | 'vscode' | 'electron' | 'nodejs';
export const acquireVsCodeApi = (window as any)['acquireVsCodeApi'];
export const electronApi = (window as any)['electronApi'];
export const isNodejs = (window as any)['nodejs'];
export function getPlatform(): OpenMcpSupportPlatform {
if (typeof acquireVsCodeApi !== 'undefined') {
if (acquireVsCodeApi) {
return 'vscode';
} else if (typeof electronApi !== 'undefined') {
} else if (electronApi) {
return 'electron';
} else if (isNodejs) {
return 'nodejs';
} else {
return 'web';
}
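
A short sketch of how the new 'nodejs' branch is meant to fire: the SDK webpack config later in this commit injects a fake window object with nodejs: true via DefinePlugin, so isNodejs becomes truthy outside the webview. Assuming that shim (or an equivalent global) is in place:

import { getPlatform } from '@/api/platform';

// With window shimmed as { nodejs: true, ... }, neither acquireVsCodeApi nor
// electronApi is defined, so detection falls through to the nodejs branch.
const platform = getPlatform();  // -> 'nodejs'

if (platform === 'nodejs') {
    // take the Node-only paths added in this commit (EventEmitter bridge, clipboard no-ops, ...)
}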

View File

@ -1,357 +0,0 @@
/* eslint-disable */
import type { Ref } from "vue";
import { ToolCall, ChatStorage, getToolSchema, MessageState } from "../chat-box/chat";
import { useMessageBridge } from "@/api/message-bridge";
import type { OpenAI } from 'openai';
import { llmManager, llms } from "@/views/setting/llm";
import { pinkLog, redLog } from "@/views/setting/util";
import { ElMessage } from "element-plus";
import { handleToolCalls } from "./handle-tool-calls";
export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };
interface TaskLoopOptions {
maxEpochs: number;
maxJsonParseRetry: number;
}
interface IErrorMssage {
state: MessageState,
msg: string
}
interface IDoConversationResult {
stop: boolean;
}
/**
* @description
*/
export class TaskLoop {
private bridge = useMessageBridge();
private currentChatId = '';
private completionUsage: ChatCompletionChunk['usage'] | undefined;
constructor(
private readonly streamingContent: Ref<string>,
private readonly streamingToolCalls: Ref<ToolCall[]>,
private onError: (error: IErrorMssage) => void = (msg) => {},
private onChunk: (chunk: ChatCompletionChunk) => void = (chunk) => {},
private onDone: () => void = () => {},
private onEpoch: () => void = () => {},
private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20, maxJsonParseRetry: 3 },
) {
}
private handleChunkDeltaContent(chunk: ChatCompletionChunk) {
const content = chunk.choices[0]?.delta?.content || '';
if (content) {
this.streamingContent.value += content;
}
}
private handleChunkDeltaToolCalls(chunk: ChatCompletionChunk) {
const toolCall = chunk.choices[0]?.delta?.tool_calls?.[0];
if (toolCall) {
const currentCall = this.streamingToolCalls.value[toolCall.index];
if (currentCall === undefined) {
// a new tool call begins
this.streamingToolCalls.value[toolCall.index] = {
id: toolCall.id,
index: toolCall.index,
type: 'function',
function: {
name: toolCall.function?.name || '',
arguments: toolCall.function?.arguments || ''
}
};
} else {
// accumulate the data of the existing tool call
if (currentCall) {
if (toolCall.id) {
currentCall.id = toolCall.id;
}
if (toolCall.function?.name) {
currentCall.function.name = toolCall.function.name;
}
if (toolCall.function?.arguments) {
currentCall.function.arguments += toolCall.function.arguments;
}
}
}
}
}
private handleChunkUsage(chunk: ChatCompletionChunk) {
const usage = chunk.usage;
if (usage) {
this.completionUsage = usage;
}
}
private doConversation(chatData: ChatCompletionCreateParamsBase) {
return new Promise<IDoConversationResult>((resolve, reject) => {
const chunkHandler = this.bridge.addCommandListener('llm/chat/completions/chunk', data => {
// data.code is guaranteed to be 200 here, otherwise this route is never taken
const { chunk } = data.msg as { chunk: ChatCompletionChunk };
// handle the incremental content and tool_calls
this.handleChunkDeltaContent(chunk);
this.handleChunkDeltaToolCalls(chunk);
this.handleChunkUsage(chunk);
this.onChunk(chunk);
}, { once: false });
const doneHandler = this.bridge.addCommandListener('llm/chat/completions/done', data => {
this.onDone();
chunkHandler();
errorHandler();
resolve({
stop: false
});
}, { once: true });
const errorHandler = this.bridge.addCommandListener('llm/chat/completions/error', data => {
this.onError({
state: MessageState.ReceiveChunkError,
msg: data.msg || '请求模型服务时发生错误'
});
chunkHandler();
doneHandler();
resolve({
stop: true
});
}, { once: true });
console.log(chatData);
this.bridge.postMessage({
command: 'llm/chat/completions',
data: JSON.parse(JSON.stringify(chatData)),
});
});
}
public makeChatData(tabStorage: ChatStorage): ChatCompletionCreateParamsBase | undefined {
const baseURL = llms[llmManager.currentModelIndex].baseUrl;
const apiKey = llms[llmManager.currentModelIndex].userToken || '';
if (apiKey.trim() === '') {
if (tabStorage.messages.length > 0 && tabStorage.messages[tabStorage.messages.length - 1].role === 'user') {
tabStorage.messages.pop();
ElMessage.error('请先设置 API Key');
}
return undefined;
}
const model = llms[llmManager.currentModelIndex].userModel;
const temperature = tabStorage.settings.temperature;
const tools = getToolSchema(tabStorage.settings.enableTools);
const userMessages = [];
if (tabStorage.settings.systemPrompt) {
userMessages.push({
role: 'system',
content: tabStorage.settings.systemPrompt
});
}
// if tabStorage.settings.contextLength is exceeded, drop the earliest messages
const loadMessages = tabStorage.messages.slice(- tabStorage.settings.contextLength);
userMessages.push(...loadMessages);
// add an id used to track and lock the request state
const id = crypto.randomUUID();
const chatData = {
id,
baseURL,
apiKey,
model,
temperature,
tools,
messages: userMessages,
} as ChatCompletionCreateParamsBase;
return chatData;
}
public abort() {
this.bridge.postMessage({
command: 'llm/chat/completions/abort',
data: {
id: this.currentChatId
}
});
this.streamingContent.value = '';
this.streamingToolCalls.value = [];
}
public registerOnError(handler: (msg: IErrorMssage) => void) {
this.onError = handler;
}
public registerOnChunk(handler: (chunk: ChatCompletionChunk) => void) {
this.onChunk = handler;
}
public registerOnDone(handler: () => void) {
this.onDone = handler;
}
public registerOnEpoch(handler: () => void) {
this.onEpoch = handler;
}
public setMaxEpochs(maxEpochs: number) {
this.taskOptions.maxEpochs = maxEpochs;
}
/**
* @description DOM
*/
public async start(tabStorage: ChatStorage, userMessage: string) {
// append the current user message
tabStorage.messages.push({
role: 'user',
content: userMessage,
extraInfo: {
created: Date.now(),
state: MessageState.Success,
serverName: llms[llmManager.currentModelIndex].id || 'unknown'
}
});
let jsonParseErrorRetryCount = 0;
for (let i = 0; i < this.taskOptions.maxEpochs; ++ i) {
this.onEpoch();
// reset the streaming accumulators at the start of each epoch
this.streamingContent.value = '';
this.streamingToolCalls.value = [];
this.completionUsage = undefined;
// build chatData
const chatData = this.makeChatData(tabStorage);
if (!chatData) {
this.onDone();
break;
}
this.currentChatId = chatData.id!;
// send the request
const doConverationResult = await this.doConversation(chatData);
console.log(doConverationResult);
// if there are tool calls to dispatch
if (this.streamingToolCalls.value.length > 0) {
tabStorage.messages.push({
role: 'assistant',
content: this.streamingContent.value || '',
tool_calls: this.streamingToolCalls.value,
extraInfo: {
created: Date.now(),
state: MessageState.Success,
serverName: llms[llmManager.currentModelIndex].id || 'unknown'
}
});
pinkLog('调用工具数量:' + this.streamingToolCalls.value.length);
for (const toolCall of this.streamingToolCalls.value || []) {
const toolCallResult = await handleToolCalls(toolCall);
if (toolCallResult.state === MessageState.ParseJsonError) {
// if the failure was a JSON parse error, start this round over
tabStorage.messages.pop();
jsonParseErrorRetryCount ++;
redLog('解析 JSON 错误 ' + toolCall?.function?.arguments);
// if too many failures were caused by JSON errors, give up and abort
if (jsonParseErrorRetryCount >= this.taskOptions.maxJsonParseRetry) {
tabStorage.messages.push({
role: 'assistant',
content: `解析 JSON 错误,无法继续调用工具 (累计错误次数 ${this.taskOptions.maxJsonParseRetry})`,
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
usage: undefined
}
});
break;
}
} else if (toolCallResult.state === MessageState.Success) {
tabStorage.messages.push({
role: 'tool',
index: toolCall.index || 0,
tool_call_id: toolCall.id || toolCall.function.name,
content: toolCallResult.content,
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
usage: this.completionUsage
}
});
} else if (toolCallResult.state === MessageState.ToolCall) {
tabStorage.messages.push({
role: 'tool',
index: toolCall.index || 0,
tool_call_id: toolCall.id || toolCall.function.name,
content: toolCallResult.content,
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
usage: this.completionUsage
}
});
}
}
} else if (this.streamingContent.value) {
tabStorage.messages.push({
role: 'assistant',
content: this.streamingContent.value,
extraInfo: {
created: Date.now(),
state: MessageState.Success,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
usage: this.completionUsage
}
});
break;
} else {
// some hints
break;
}
// once the answer has been aggregated, use stop to decide whether to break early
if (doConverationResult.stop) {
break;
}
}
}
}
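
For reference, a minimal sketch of how this (now removed) TaskLoop was typically driven from the webview; the import path and the tabStorage object are illustrative, while the constructor, the register* methods and start() match the class above:

import { ref } from 'vue';
import type { ToolCall } from '../chat-box/chat';
import { TaskLoop } from './task-loop';          // illustrative path

const streamingContent = ref('');
const streamingToolCalls = ref<ToolCall[]>([]);

const loop = new TaskLoop(streamingContent, streamingToolCalls);
loop.registerOnError(err => console.error(err.msg));
loop.registerOnDone(() => console.log('one completion finished'));

// tabStorage is the ChatStorage object that owns messages and settings.
await loop.start(tabStorage, 'hello');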

View File

@ -10,20 +10,21 @@ import { handleToolCalls } from "./handle-tool-calls";
export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };
interface TaskLoopOptions {
export interface TaskLoopOptions {
maxEpochs: number;
maxJsonParseRetry: number;
}
interface IErrorMssage {
export interface IErrorMssage {
state: MessageState,
msg: string
}
interface IDoConversationResult {
export interface IDoConversationResult {
stop: boolean;
}
/**
* @description
*/
@ -31,6 +32,7 @@ export class TaskLoop {
private bridge = useMessageBridge();
private currentChatId = '';
private completionUsage: ChatCompletionChunk['usage'] | undefined;
private llmConfig: any;
constructor(
private readonly streamingContent: Ref<string>,
@ -41,7 +43,7 @@ export class TaskLoop {
private onEpoch: () => void = () => {},
private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20, maxJsonParseRetry: 3 },
) {
}
private handleChunkDeltaContent(chunk: ChatCompletionChunk) {
@ -143,8 +145,8 @@ export class TaskLoop {
}
public makeChatData(tabStorage: ChatStorage): ChatCompletionCreateParamsBase | undefined {
const baseURL = llms[llmManager.currentModelIndex].baseUrl;
const apiKey = llms[llmManager.currentModelIndex].userToken || '';
const baseURL = this.getLlmConfig().baseUrl;
const apiKey = this.getLlmConfig().userToken || '';
if (apiKey.trim() === '') {
@ -155,7 +157,7 @@ export class TaskLoop {
return undefined;
}
const model = llms[llmManager.currentModelIndex].userModel;
const model = this.getLlmConfig().userModel;
const temperature = tabStorage.settings.temperature;
const tools = getToolSchema(tabStorage.settings.enableTools);
@ -218,6 +220,32 @@ export class TaskLoop {
this.taskOptions.maxEpochs = maxEpochs;
}
/**
* @description manually set the LLM config (for running under nodejs without the settings view)
* @param config
* @example
* setLlmConfig({
* id: 'openai',
* baseUrl: 'https://api.openai.com/v1',
* userToken: 'sk-xxx',
* userModel: 'gpt-3.5-turbo',
* })
*/
public setLlmConfig(config: any) {
this.llmConfig = config;
}
public getLlmConfig() {
if (this.llmConfig) {
return this.llmConfig;
}
return llms[llmManager.currentModelIndex];
}
public async connectToService() {
}
/**
* @description DOM
*/
@ -229,7 +257,7 @@ export class TaskLoop {
extraInfo: {
created: Date.now(),
state: MessageState.Success,
serverName: llms[llmManager.currentModelIndex].id || 'unknown'
serverName: this.getLlmConfig().id || 'unknown'
}
});
@ -270,7 +298,7 @@ export class TaskLoop {
extraInfo: {
created: Date.now(),
state: MessageState.Success,
serverName: llms[llmManager.currentModelIndex].id || 'unknown'
serverName: this.getLlmConfig().id || 'unknown'
}
});
@ -294,7 +322,7 @@ export class TaskLoop {
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
serverName: this.getLlmConfig().id || 'unknown',
usage: undefined
}
});
@ -309,7 +337,7 @@ export class TaskLoop {
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
serverName: this.getLlmConfig().id || 'unknown',
usage: this.completionUsage
}
});
@ -323,7 +351,7 @@ export class TaskLoop {
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
serverName: this.getLlmConfig().id || 'unknown',
usage: this.completionUsage
}
});
@ -337,7 +365,7 @@ export class TaskLoop {
extraInfo: {
created: Date.now(),
state: MessageState.Success,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
serverName: this.getLlmConfig().id || 'unknown',
usage: this.completionUsage
}
});
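
A hedged sketch of the headless usage these changes enable: instead of reading llms[llmManager.currentModelIndex] from the settings view, a Node.js caller supplies its own config, and every serverName / makeChatData lookup then goes through getLlmConfig(). Field names follow the JSDoc example above; the token source and the tabStorage object are assumptions:

const loop = new TaskLoop(streamingContent, streamingToolCalls);

loop.setLlmConfig({
    id: 'openai',
    baseUrl: 'https://api.openai.com/v1',
    userToken: process.env.OPENAI_API_KEY ?? '',   // assumption: token comes from the environment
    userModel: 'gpt-3.5-turbo',
});

await loop.start(tabStorage, 'hello');             // tabStorage: the ChatStorage owning messages/settings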

View File

@ -60,7 +60,13 @@ export default function highlight(option: HighlightOption = {}) {
const codeElement = codeBlock.querySelector('code');
const code = codeElement?.textContent || '';
navigator.clipboard.writeText(code).then(() => {
// support running under nodejs
const thisWindow = window as any;
if (!thisWindow || !thisWindow.navigator || !thisWindow.navigator.clipboard) {
return;
}
window.navigator.clipboard.writeText(code).then(() => {
const originalText = button.textContent;
button.textContent = '已复制';
setTimeout(() => {

View File

@ -23,6 +23,12 @@ const pureHighLightMd = new MarkdownIt({
});
export const copyToClipboard = (text: string) => {
// support running under nodejs
const thisWindow = window as any;
if (!thisWindow || !thisWindow.navigator || !thisWindow.navigator.clipboard) {
return;
}
return navigator.clipboard.writeText(text);
};
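
Both clipboard call sites now carry the same defensive check. Shown below is that pattern factored into a helper, purely as an illustration (the helper name is hypothetical, not part of the commit):

function safeWriteClipboard(text: string): Promise<void> | undefined {
    const thisWindow = window as any;
    if (!thisWindow || !thisWindow.navigator || !thisWindow.navigator.clipboard) {
        // no Clipboard API (e.g. running under nodejs): silently skip
        return undefined;
    }
    return thisWindow.navigator.clipboard.writeText(text);
}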

View File

@ -147,8 +147,8 @@ interface GetColorOption {
export class MacroColor {
private option: ComputedColorOption;
private rootStyles: CSSStyleDeclaration;
private theme: 'light' | 'dark';
private rootStyles?: CSSStyleDeclaration;
private theme: 'light' | 'dark' = 'dark';
public foregroundColor: RgbColor | undefined;
public backgroundColor: RgbColor | undefined;
public foregroundColorString: string;
@ -195,7 +195,7 @@ export class MacroColor {
if (mode === 'svg') {
// in svg mode the exported result is essentially identical to the webview rendering, so export it directly
return rootStyles.getPropertyValue(macroName);
return rootStyles?.getPropertyValue(macroName) || '#fff';
}
// pdf mode needs special handling for a few dark-theme colors and must blend every transparent color
@ -208,7 +208,7 @@ export class MacroColor {
}
}
const colorString = rootStyles.getPropertyValue(macroName);
const colorString = rootStyles?.getPropertyValue(macroName) || '#fff';
if (!colorString) {
// if macroName does not exist, return the empty string
return colorString;

View File

@ -18,6 +18,12 @@ export function getThemeColor(): 'light' | 'dark' {
if (themeColor) {
return themeColor;
}
const myDocument = document as any;
if (!myDocument) {
return 'dark';
}
const rootStyles = getComputedStyle(document.documentElement);
const backgroundColorString = rootStyles.getPropertyValue('--background');
const backgroundColor = Color.parseColor(backgroundColorString);
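
The same environment guard recurs in the router and color-setting changes below. As written it assumes document evaluates to undefined or null rather than throwing a ReferenceError in a non-browser host; a typeof check (sketch only, not part of the commit) avoids relying on that:

// Fully defensive variant of the guard used above.
const hasDom = typeof document !== 'undefined' && typeof getComputedStyle !== 'undefined';
if (!hasDom) {
    // headless / nodejs: fall back to 'dark', as getThemeColor does above
}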

View File

@ -39,7 +39,9 @@ const router = createRouter({
router.beforeEach((to, from, next) => {
if (to.meta.title) {
const myDocument = document as any;
if (to.meta.title && myDocument) {
document.title = `OpenMCP | ${to.meta.title}`;
}
next();

View File

@ -1,8 +1,7 @@
import { useMessageBridge } from '@/api/message-bridge';
import { reactive, ref } from 'vue';
import { pinkLog } from '../setting/util';
import { arrowMiddleware, ElMessage } from 'element-plus';
import { ILaunchSigature } from '@/hook/type';
import { ElMessage } from 'element-plus';
import { OpenMcpSupportPlatform } from '@/api/platform';
export const connectionMethods = reactive({
@ -172,10 +171,7 @@ async function launchStdio(namespace: string) {
message: msg
});
ElMessage({
type: 'error',
message: msg
});
ElMessage.error(msg);
}
}
@ -226,10 +222,7 @@ async function launchSSE(namespace: string) {
message: msg
});
ElMessage({
type: 'error',
message: msg
});
ElMessage.error(msg);
}
}

View File

@ -26,6 +26,11 @@ export function onGeneralColorChange(colorString: string) {
return;
}
const { r, g, b } = color;
const myDocument = document as any;
if (!myDocument) {
return;
}
document.documentElement.style.setProperty(
'--main-color', `rgb(${r}, ${g}, ${b})`);

View File

@ -1,44 +1,61 @@
const path = require('path');
const TerserPlugin = require('terser-webpack-plugin');
const webpack = require('webpack');
module.exports = {
mode: 'production',
entry: './renderer/src/components/main-panel/chat/core/task-loop-sdk.ts',
output: {
path: path.resolve(__dirname, '../openmcp-sdk'),
filename: 'task-loop-sdk.js',
libraryTarget: 'commonjs2'
},
target: 'node',
resolve: {
extensions: ['.ts', '.js'],
alias: {
'@': path.resolve(__dirname, '../renderer/src'), // fix the path alias
mode: 'development', // switch to development mode
devtool: 'source-map', // emit a source map for debugging
entry: './renderer/src/components/main-panel/chat/core/task-loop.ts',
output: {
path: path.resolve(__dirname, '../openmcp-sdk'),
filename: 'task-loop.js',
libraryTarget: 'commonjs2'
},
},
module: {
rules: [
{
test: /\.ts$/,
use: 'ts-loader',
exclude: /node_modules/,
},
{
test: /\.vue$/,
use: {
loader: 'null-loader'
}
}
target: 'node',
resolve: {
extensions: ['.ts', '.js'],
alias: {
'@': path.resolve(__dirname, '../renderer/src'), // fix the path alias
},
},
module: {
rules: [
{
test: /\.ts$/,
use: 'ts-loader',
exclude: /node_modules/,
},
{
test: /\.vue$/,
use: {
loader: 'null-loader'
}
}
],
},
optimization: {
minimize: false, // disable code minification
minimizer: [
new TerserPlugin({
extractComments: false, // do not extract license comments into a separate file
}),
],
},
plugins: [
new webpack.DefinePlugin({
window: {
nodejs: true,
navigator: {
userAgent: 2
},
performance: {
now: () => Date.now()
}
}
}),
],
},
optimization: {
minimizer: [
new TerserPlugin({
extractComments: false, // do not extract license comments into a separate file
}),
],
},
externals: {
vue: 'vue', // do not bundle the vue library
},
externals: {
vue: 'vue', // do not bundle the vue library
'element-plus': './tool.js'
},
};
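
A hedged sketch of consuming the resulting bundle from Node.js, assuming TaskLoop is the entry's main export: the path and filename come from output above, vue stays external so it must be installed next to the bundle, and element-plus calls are redirected to a ./tool.js stub by externals:

import { ref } from 'vue';                               // external: install vue beside the bundle
import { TaskLoop } from '../openmcp-sdk/task-loop';     // output.path + output.filename above

const loop = new TaskLoop(ref(''), ref([]));
loop.setLlmConfig({ id: 'openai', baseUrl: 'https://api.openai.com/v1', userToken: 'sk-xxx', userModel: 'gpt-3.5-turbo' });
// Ship ./tool.js next to task-loop.js, since 'element-plus' resolves to it at runtime.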