task loop

This commit is contained in:
锦恢 2025-06-19 15:19:31 +08:00
parent e6d1f1205a
commit a5fe596cac
3 changed files with 360 additions and 3 deletions

View File

@ -288,9 +288,6 @@ export async function handleXmlWrapperToolcall(toolcall: XmlToolCall): Promise<T
            state: MessageState.InvalidXml
        }
    }
    // 进行调用,根据结果返回不同的值
    console.log(toolcall);
    const toolResponse = await mcpClientAdapter.callTool(toolcall.name, toolcall.parameters);
    return handleToolResponse(toolResponse);

View File

@ -152,3 +152,60 @@ export class TaskLoopAdapter {
    }
}
import { TaskLoop } from '../../task-loop.js';
import * as fs from 'fs';
import chalk from 'chalk';
/**
 * Thin agent facade over {@link TaskLoopAdapter}: registers MCP backends
 * and loads MCP/LLM configuration from disk.
 */
export class OAgent {
    private adapter: TaskLoopAdapter;

    constructor() {
        this.adapter = new TaskLoopAdapter();
    }

    /**
     * @description Register an MCP connection on the underlying adapter.
     * @param mcpOption - Connection arguments for the MCP backend.
     */
    public addMcp(mcpOption: IConnectionArgs) {
        this.adapter.addMcp(mcpOption);
    }

    /**
     * @description Load MCP configuration from file.
     * Supports multiple MCP backends and a default LLM model configuration.
     *
     * Configuration below can be generated from OpenMCP Client directly: https://kirigaya.cn/openmcp
     *
     * @example
     * Example configuration:
     * {
     *   "version": "1.0.0",
     *   "mcpServers": {
     *       "openmemory": {
     *           "command": "npx",
     *           "args": ["-y", "openmemory"],
     *           "env": {
     *               "OPENMEMORY_API_KEY": "YOUR_API_KEY",
     *               "CLIENT_NAME": "openmemory"
     *           },
     *           "description": "A MCP for long-term memory support",
     *           "prompt": "You are a helpful assistant."
     *       }
     *   },
     *   "llm": {
     *       "baseURL": "https://api.deepseek.com",
     *       "apiToken": "YOUR_API_KEY",
     *       "model": "deepseek-chat"
     *   }
     * }
     *
     * @param configPath - Path to the configuration file
     * @returns The parsed configuration object.
     * @throws Error when the file does not exist.
     */
    public loadMcpConfig(configPath: string) {
        if (!fs.existsSync(configPath)) {
            console.log(
                chalk.red(`× configPath not exists! ${configPath}`),
            );
            throw new Error(`× configPath not exists! ${configPath}`);
        }
        // NOTE(review): JSON.parse result is not validated against the schema
        // documented above — confirm shape before use. Returned so callers can
        // inspect it (the original parsed it and discarded the result).
        const config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
        return config;
    }
}

303
service/task-loop.d.ts vendored Normal file
View File

@ -0,0 +1,303 @@
/* eslint-disable */
import type { OpenAI } from 'openai';
/** Streaming chunk type re-exported from the OpenAI SDK. */
export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
/** Chat-completion request params, extended with an optional request id. */
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };
/** JSON-schema-style description of a single tool parameter. */
interface SchemaProperty {
    title: string;
    type: string;
    description?: string;
}

/** JSON-schema-style description of a tool's input object. */
interface InputSchema {
    type: string;
    properties: Record<string, SchemaProperty>;
    required?: string[];
    title?: string;
    $defs?: any;
}

/** A tool available to the task loop, with its input schema and enabled flag. */
interface ToolItem {
    name: string;
    description: string;
    inputSchema: InputSchema;
    enabled: boolean;
    anyOf?: any;
}
/** Metadata attached to every chat message. */
interface IExtraInfo {
    created: number,     // creation timestamp
    state: MessageState, // terminal state of the message
    serverName: string,
    usage?: ChatCompletionChunk['usage']; // token usage reported by the LLM, if any
    enableXmlWrapper: boolean;
    [key: string]: any;
}

/** A message produced by a tool call. */
interface ToolMessage {
    role: 'tool';
    index: number;
    content: ToolCallContent[];
    tool_call_id?: string
    name?: string // tool name, set when role is 'tool'
    tool_calls?: ToolCall[],
    extraInfo: IExtraInfo
}

/** A plain message from the user, assistant, or system. */
interface TextMessage {
    role: 'user' | 'assistant' | 'system';
    content: string;
    tool_call_id?: string
    name?: string // tool name, set when role is 'tool'
    tool_calls?: ToolCall[],
    extraInfo: IExtraInfo
}

/** Any message in a conversation. */
export type ChatMessage = ToolMessage | TextMessage;
/** Persisted conversation state: its messages plus the settings in effect. */
interface ChatStorage {
    messages: ChatMessage[]
    settings: ChatSetting
}

/** A tool entry as enabled/disabled in a chat's settings. */
interface EnableToolItem {
    name: string;
    description: string;
    enabled: boolean;
    inputSchema: InputSchema;
}

/** Minimal Vue-style ref wrapper used for streaming bindings. */
export type Ref<T> = {
    value: T;
};
/** An LLM-requested tool invocation (OpenAI `tool_calls` shape). */
export interface ToolCall {
    id?: string;
    index?: number;
    type: string;
    function: {
        name: string;
        arguments: string; // JSON-encoded argument string, as produced by the model
    }
}

/** One piece of content returned by a tool. */
export interface ToolCallContent {
    type: string;
    text: string;
    [key: string]: any;
}

/** Result of executing a tool call. */
export interface ToolCallResult {
    state: MessageState;
    content: ToolCallContent[];
}

/** Terminal state of a message or tool call. */
export enum MessageState {
    ServerError = 'server internal error',
    ReceiveChunkError = 'receive chunk error',
    Timeout = 'timeout',
    MaxEpochs = 'max epochs',
    Unknown = 'unknown error',
    Abort = 'abort',
    ToolCall = 'tool call failed',
    None = 'none',
    Success = 'success',
    ParseJsonError = 'parse json error'
}

/**
 * Error payload delivered to onError handlers.
 * NOTE(review): the name is misspelled ("Mssage") but it is exported public
 * API — renaming would break consumers.
 */
export interface IErrorMssage {
    state: MessageState;
    msg: string;
}

/** Outcome of a single conversation round: whether the loop should stop. */
export interface IDoConversationResult {
    stop: boolean;
}
/** Options controlling how a {@link TaskLoop} executes. */
export interface TaskLoopOptions {
    /**
     * The maximum number of epochs (conversation rounds) to perform.
     */
    maxEpochs?: number;
    /**
     * The maximum number of retries allowed when parsing JSON responses fails.
     */
    maxJsonParseRetry?: number;
    /**
     * A custom adapter that can be used to modify behavior or integrate with different environments.
     */
    adapter?: any;
    /**
     * Verbosity level for logging:
     * 0 - Silent, 1 - Errors only, 2 - Warnings and errors, 3 - Full debug output.
     */
    verbose?: 0 | 1 | 2 | 3;
}
/** Per-conversation settings: model choice, prompt, tools, and sampling. */
interface ChatSetting {
    /**
     * Index of the selected language model from a list of available models.
     */
    modelIndex: number;
    /**
     * System-level prompt used to guide the behavior of the assistant.
     */
    systemPrompt: string;
    /**
     * List of tools that are enabled and available during the chat.
     */
    enableTools: EnableToolItem[];
    /**
     * Sampling temperature for generating responses.
     * Higher values (e.g., 0.8) make output more random; lower values (e.g., 0.2) make it more focused and deterministic.
     */
    temperature: number;
    /**
     * Whether web search is enabled for enhancing responses with real-time information.
     */
    enableWebSearch: boolean;
    /**
     * Maximum length of the conversation context to keep.
     */
    contextLength: number;
    /**
     * Whether multiple tools can be called in parallel within a single message.
     */
    parallelToolCalls: boolean;
    /**
     * Whether to wrap tool call responses in XML format.
     */
    enableXmlWrapper: boolean;
}
/**
 * @description Drives multi-round LLM conversations: builds chat requests,
 * streams response chunks, dispatches tool calls, and repeats up to maxEpochs.
 */
export class TaskLoop {
    constructor(taskOptions?: TaskLoopOptions);
    /**
     * @description Build the chat-completion request payload from storage;
     * returns undefined when no request should be made.
     * @param tabStorage
     */
    makeChatData(tabStorage: any): ChatCompletionCreateParamsBase | undefined;
    /**
     * @description stop the task loop
     */
    abort(): void;
    /**
     * @description Register a callback function triggered on error
     * @param handler
     */
    registerOnError(handler: (msg: IErrorMssage) => void): void;
    /**
     * @description Register a callback function triggered on chunk
     * @param handler
     */
    registerOnChunk(handler: (chunk: ChatCompletionChunk) => void): void;
    /**
     * @description Register a callback function triggered when the task loop
     * finishes. NOTE(review): the original comment said "beginning of each
     * epoch", apparently copy-pasted from registerOnEpoch.
     * @param handler
     */
    registerOnDone(handler: () => void): void;
    /**
     * @description Register a callback function triggered at the beginning of each epoch
     * @param handler
     */
    registerOnEpoch(handler: () => void): void;
    /**
     * @description Registers a callback function that is triggered when a tool call is completed. This method allows you to intercept and modify the output of the tool call.
     * @param handler
     */
    registerOnToolCalled(handler: (toolCallResult: ToolCallResult) => ToolCallResult): void;
    /**
     * @description Register a callback triggered before a tool call is executed.
     * You can intercept and modify the tool call.
     * @param handler
     */
    registerOnToolCall(handler: (toolCall: ToolCall) => ToolCall): void;
    /**
     * @description Get current LLM configuration
     */
    getLlmConfig(): any;
    /**
     * @description Set the current LLM configuration, for Node.js environment
     * @param config
     * @example
     * setLlmConfig({
     *   id: 'openai',
     *   baseUrl: 'https://api.openai.com/v1',
     *   userToken: 'sk-xxx',
     *   userModel: 'gpt-3.5-turbo',
     * })
     */
    setLlmConfig(config: any): void;
    /**
     * @description Set the maximum number of conversation epochs.
     * NOTE(review): the original comment said "Set proxy server" — copy-paste error.
     * @param maxEpochs
     */
    setMaxEpochs(maxEpochs: number): void;
    /**
     * @description bind streaming content and tool calls
     */
    bindStreaming(content: Ref<string>, toolCalls: Ref<ToolCall[]>): void;
    /**
     * @description Connect to the backing service.
     * NOTE(review): marked "not finish" in source — implementation incomplete.
     */
    connectToService(): Promise<void>;
    /**
     * @description Set the proxy server for outbound requests.
     * @param proxyServer
     */
    setProxyServer(proxyServer: string): void;
    /**
     * @description Get all available tool list
     */
    listTools(): Promise<ToolItem[]>;
    /**
     * @description Start the loop and asynchronously update the DOM
     */
    start(tabStorage: any, userMessage: string): Promise<void>;
    /**
     * @description Create single conversation context
     */
    createStorage(settings?: ChatSetting): Promise<ChatStorage>;
}
// Re-exported helpers from the service bundle. All are declared `any` here;
// consult the implementation for their actual signatures.
export declare const getToolSchema: any;
export declare const useMessageBridge: any;
export declare const llmManager: any;
export declare const llms: any;
export declare const pinkLog: any;
export declare const redLog: any;
export declare const ElMessage: any;
export declare const handleToolCalls: any;
export declare const getPlatform: any;