From c9a2d47c854653b9b94dc06b384ccab1214fd5d3 Mon Sep 17 00:00:00 2001
From: Kirigaya <1193466151@qq.com>
Date: Thu, 19 Jun 2025 14:20:18 +0800
Subject: [PATCH] task loop

---
 .../main-panel/chat/core/task-loop.ts        |  38 +++-
 resources/openmcp-sdk-release/package.json   |   2 +-
 resources/openmcp-sdk-release/task-loop.d.ts | 186 ++++++++++++++----
 3 files changed, 188 insertions(+), 38 deletions(-)

diff --git a/renderer/src/components/main-panel/chat/core/task-loop.ts b/renderer/src/components/main-panel/chat/core/task-loop.ts
index b338ef1..900c65e 100644
--- a/renderer/src/components/main-panel/chat/core/task-loop.ts
+++ b/renderer/src/components/main-panel/chat/core/task-loop.ts
@@ -1,6 +1,6 @@
 /* eslint-disable */
 import { ref, type Ref } from "vue";
-import { type ToolCall, type ChatStorage, getToolSchema, MessageState, type ChatMessage } from "../chat-box/chat";
+import { type ToolCall, type ChatStorage, getToolSchema, MessageState, type ChatMessage, type ChatSetting, type EnableToolItem } from "../chat-box/chat";
 import { useMessageBridge, MessageBridge, createMessageBridge } from "@/api/message-bridge";
 import type { OpenAI } from 'openai';
 import { llmManager, llms, type BasicLlmDescription } from "@/views/setting/llm";
@@ -737,4 +737,40 @@ export class TaskLoop {
             }
         }
     }
+
+    public async createStorage(settings?: ChatSetting): Promise<ChatStorage> {
+        let {
+            enableXmlWrapper = false,
+            systemPrompt = '',
+            temperature = 0.6,
+            contextLength = 100,
+            parallelToolCalls = true,
+            enableWebSearch = false,
+            enableTools = undefined,
+        } = settings || {};
+
+        if (enableTools === undefined) {
+            // use all available tools by default when none are specified
+            const tools = await this.listTools();
+            enableTools = tools.map(tool => ({
+                ...tool,
+                enabled: true
+            })) as EnableToolItem[];
+        }
+
+        const _settings = {
+            enableXmlWrapper,
+            systemPrompt,
+            temperature,
+            contextLength,
+            parallelToolCalls,
+            enableTools,
+            enableWebSearch
+        } as ChatSetting;
+
+        return {
+            messages: [],
+            settings: _settings
+        }
+    }
 }
\ No newline at end of file
diff --git a/resources/openmcp-sdk-release/package.json b/resources/openmcp-sdk-release/package.json
index 494ef58..bc835e0 100644
--- a/resources/openmcp-sdk-release/package.json
+++ b/resources/openmcp-sdk-release/package.json
@@ -1,6 +1,6 @@
 {
   "name": "openmcp-sdk",
-  "version": "0.0.7",
+  "version": "0.0.8",
   "description": "openmcp-sdk",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1"
diff --git a/resources/openmcp-sdk-release/task-loop.d.ts b/resources/openmcp-sdk-release/task-loop.d.ts
index 933c9ad..76965b6 100644
--- a/resources/openmcp-sdk-release/task-loop.d.ts
+++ b/resources/openmcp-sdk-release/task-loop.d.ts
@@ -4,20 +4,13 @@ import type { OpenAI } from 'openai';
 export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
 export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };

-export interface TaskLoopOptions {
-    maxEpochs?: number;
-    maxJsonParseRetry?: number;
-    adapter?: any;
-    verbose?: 0 | 1 | 2 | 3;
-}
-
-export interface SchemaProperty {
+interface SchemaProperty {
     title: string;
     type: string;
     description?: string;
 }

-export interface InputSchema {
+interface InputSchema {
     type: string;
     properties: Record<string, SchemaProperty>;
     required?: string[];
@@ -25,7 +18,7 @@
     $defs?: any;
 }

-export interface ToolItem {
+interface ToolItem {
     name: string;
     description: string;
     inputSchema: InputSchema;
@@ -33,6 +26,50 @@
     anyOf?: any;
 }

+interface IExtraInfo {
+    created: number,
+    state: MessageState,
+    serverName: string,
+    usage?: ChatCompletionChunk['usage'];
+    enableXmlWrapper: boolean;
+    [key: string]: any;
+}
+
+
+interface ToolMessage {
+    role: 'tool';
+    index: number;
+    content: ToolCallContent[];
+    tool_call_id?: string
+    name?: string // tool name, present when role is 'tool'
+    tool_calls?: ToolCall[],
+    extraInfo: IExtraInfo
+}
+
+interface TextMessage {
+    role: 'user' | 'assistant' | 'system';
+    content: string;
+    tool_call_id?: string
+    name?: string // tool name, present when role is 'tool'
+    tool_calls?: ToolCall[],
+    extraInfo: IExtraInfo
+}
+
+export type ChatMessage = ToolMessage | TextMessage;
+
+interface ChatStorage {
+    messages: ChatMessage[]
+    settings: ChatSetting
+}
+
+interface EnableToolItem {
+    name: string;
+    description: string;
+    enabled: boolean;
+    inputSchema: InputSchema;
+}
+
+
 export type Ref<T> = {
     value: T;
 };
@@ -80,68 +117,132 @@ export interface IDoConversationResult {
     stop: boolean;
 }

+export interface TaskLoopOptions {
+    /**
+     * The maximum number of epochs (conversation rounds) to perform.
+     */
+    maxEpochs?: number;
+
+    /**
+     * The maximum number of retries allowed when parsing JSON responses fails.
+     */
+    maxJsonParseRetry?: number;
+
+    /**
+     * A custom adapter that can be used to modify behavior or integrate with different environments.
+     */
+    adapter?: any;
+
+    /**
+     * Verbosity level for logging:
+     * 0 - Silent, 1 - Errors only, 2 - Warnings and errors, 3 - Full debug output.
+     */
+    verbose?: 0 | 1 | 2 | 3;
+}
+
+interface ChatSetting {
+    /**
+     * Index of the selected language model from a list of available models.
+     */
+    modelIndex: number;
+
+    /**
+     * System-level prompt used to guide the behavior of the assistant.
+     */
+    systemPrompt: string;
+
+    /**
+     * List of tools that are enabled and available during the chat.
+     */
+    enableTools: EnableToolItem[];
+
+    /**
+     * Sampling temperature for generating responses.
+     * Higher values (e.g., 0.8) make output more random; lower values (e.g., 0.2) make it more focused and deterministic.
+     */
+    temperature: number;
+
+    /**
+     * Whether web search is enabled for enhancing responses with real-time information.
+     */
+    enableWebSearch: boolean;
+
+    /**
+     * Maximum length of the conversation context to keep.
+     */
+    contextLength: number;
+
+    /**
+     * Whether multiple tools can be called in parallel within a single message.
+     */
+    parallelToolCalls: boolean;
+
+    /**
+     * Whether to wrap tool call responses in XML format.
+     */
+    enableXmlWrapper: boolean;
+}
+
 /**
  * @description 对任务循环进行的抽象封装
  */
 export class TaskLoop {
-    private streamingContent;
-    private streamingToolCalls;
-    private readonly taskOptions;
-    private bridge;
-    private currentChatId;
-    private onError;
-    private onChunk;
-    private onDone;
-    private onToolCalled;
-    private onEpoch;
-    private completionUsage;
-    private llmConfig;
     constructor(taskOptions?: TaskLoopOptions);
-    private handleChunkDeltaContent;
-    private handleChunkDeltaToolCalls;
-    private handleChunkUsage;
-    private doConversation;
+
+    /**
+     * @description make chat data
+     * @param tabStorage
+     */
     makeChatData(tabStorage: any): ChatCompletionCreateParamsBase | undefined;
+
+    /**
+     * @description stop the task loop
+     */
     abort(): void;
     /**
-     * @description 注册 error 发生时触发的回调函数
+     * @description Register a callback function triggered on error
      * @param handler
      */
     registerOnError(handler: (msg: IErrorMssage) => void): void;
+
+    /**
+     * @description Register a callback function triggered on each chunk
+     * @param handler
+     */
     registerOnChunk(handler: (chunk: ChatCompletionChunk) => void): void;
     /**
-     * @description 注册 chat.completion 完成时触发的回调函数
+     * @description Register a callback function triggered when the chat completion is done
      * @param handler
      */
     registerOnDone(handler: () => void): void;
     /**
-     * @description 注册每一个 epoch 开始时触发的回调函数
+     * @description Register a callback function triggered at the beginning of each epoch
      * @param handler
      */
     registerOnEpoch(handler: () => void): void;
     /**
-     * @description 注册当工具调用完成时的回调函数,会调用这个方法,可以拦截并修改 toolcall 的输出
+     * @description Registers a callback function that is triggered when a tool call is completed. This method allows you to intercept and modify the output of the tool call.
      * @param handler
      */
     registerOnToolCalled(handler: (toolCallResult: ToolCallResult) => ToolCallResult): void;
     /**
-     * @description 注册当工具调用前的回调函数,可以拦截并修改 toolcall 的输入
+     * @description Register a callback triggered before a tool call is executed. You can intercept and modify the input of the tool call.
      * @param handler
      */
     registerOnToolCall(handler: (toolCall: ToolCall) => ToolCall): void;
     /**
-     * @description 获取当前的 LLM 配置
+     * @description Get current LLM configuration
      */
     getLlmConfig(): any;
     /**
-     * @description 设置当前的 LLM 配置,用于 nodejs 环境运行
+     * @description Set the current LLM configuration, for Node.js environment
      * @param config
      * @example
      * setLlmConfig({
@@ -154,11 +255,19 @@
      */
     setLlmConfig(config: any): void;
     /**
-     * @description 设置最大 epoch 次数
+     * @description Set the maximum number of epochs
      * @param maxEpochs
      */
     setMaxEpochs(maxEpochs: number): void;
+
+    /**
+     * @description bind streaming content and tool calls
+     */
     bindStreaming(content: Ref<string>, toolCalls: Ref<ToolCall[]>): void;
+
+    /**
+     * @description not finished yet
+     */
     connectToService(): Promise;

     /**
      * @description 设置 proxy 服务器
      * @param proxyServer
      */
     setProxyServer(proxyServer: string): void;

     /**
-     * @description 获取所有可用的工具列表
+     * @description Get all available tool list
      */
     listTools(): Promise<ToolItem[]>;

     /**
-     * @description 开启循环,异步更新 DOM
+     * @description Start the loop and asynchronously update the DOM
      */
     start(tabStorage: any, userMessage: string): Promise;
+
+    /**
+     * @description Create single conversation context
+     */
+    createStorage(settings?: ChatSetting): Promise<ChatStorage>;
 }

 export declare const getToolSchema: any;
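
Usage sketch for the new createStorage API (illustrative only: the 'openmcp-sdk/task-loop' import path, the option values, and the logging are assumptions, not something this patch pins down; the setLlmConfig fields are deliberately elided, see the @example on setLlmConfig):

    import { TaskLoop } from 'openmcp-sdk/task-loop';

    async function main() {
        // TaskLoopOptions as declared above; the values here are arbitrary for the sketch
        const loop = new TaskLoop({ maxEpochs: 20, verbose: 1 });

        // fill in your own model endpoint and token here
        loop.setLlmConfig({ /* ... */ });

        // createStorage() with no argument builds an empty ChatStorage using the defaults
        // from the implementation: temperature 0.6, context length 100, parallel tool
        // calls on, and every tool returned by listTools() enabled.
        const storage = await loop.createStorage();

        loop.registerOnDone(() => console.log('chat completion finished'));
        await loop.start(storage, 'List the files in the current directory.');
    }

    main().catch(console.error);

createStorage saves callers from hand-assembling the settings block that start() consumes: the returned object already carries an empty messages array plus a fully populated ChatSetting.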