diff --git a/renderer/src/components/main-panel/chat/index.vue b/renderer/src/components/main-panel/chat/index.vue
index a18be91..3ac41bc 100644
--- a/renderer/src/components/main-panel/chat/index.vue
+++ b/renderer/src/components/main-panel/chat/index.vue
@@ -83,6 +83,7 @@ import Setting from './setting.vue';
 import { llmManager, llms } from '@/views/setting/llm';
 // import the helpers from markdown.ts
 import { markdownToHtml, copyToClipboard } from './markdown';
+import { TaskLoop } from './task-loop';
 
 defineComponent({ name: 'chat' });
 
@@ -189,149 +190,57 @@ watch(streamingContent, () => {
     }
 }, { deep: true });
 
+let loop: TaskLoop | undefined = undefined;
 const handleSend = () => {
     if (!userInput.value.trim() || isLoading.value) return;
-    autoScroll.value = true; // restore auto-scroll when a new message is sent
-    const userMessage = userInput.value.trim();
-    tabStorage.messages.push({ role: 'user', content: userMessage });
-
-    // the backend accepts baseURL, apiKey, model, messages, temperature
-    const baseURL = llms[llmManager.currentModelIndex].baseUrl;
-    const apiKey = llms[llmManager.currentModelIndex].userToken;
-    const model = llms[llmManager.currentModelIndex].userModel;
-    const temperature = tabStorage.settings.temperature;
-    const tools = getToolSchema(tabStorage.settings.enableTools);
-
-    const userMessages = [];
-    if (tabStorage.settings.systemPrompt) {
-        userMessages.push({
-            role: 'system',
-            content: tabStorage.settings.systemPrompt
-        });
-    }
-    // drop the oldest messages once tabStorage.settings.contextLength is exceeded
-    const loadMessages = tabStorage.messages.slice(-tabStorage.settings.contextLength);
-    userMessages.push(...loadMessages);
-
-    const chatData = {
-        baseURL,
-        apiKey,
-        model,
-        temperature,
-        tools,
-        messages: userMessages,
-    };
-
+    autoScroll.value = true;
     isLoading.value = true;
-    streamingContent.value = '';
 
-    const chunkHandler = bridge.addCommandListener('llm/chat/completions/chunk', data => {
-        if (data.code !== 200) {
-            handleError(data.msg || '请求模型服务时发生错误');
-            return;
-        }
-        const { chunk } = data.msg;
-
-        const content = chunk.choices[0]?.delta?.content || '';
-        const toolCall = chunk.choices[0]?.delta?.tool_calls?.[0];
-
-        if (content) {
-            streamingContent.value += content;
-            scrollToBottom();
-        }
-
-        if (toolCall) {
-            if (toolCall.index === 0) {
-                // a new tool call begins
-                streamingToolCalls.value = [{
-                    id: toolCall.id,
-                    name: toolCall.function?.name || '',
-                    arguments: toolCall.function?.arguments || ''
-                }];
-            } else {
-                // accumulate data onto the tool call in progress
-                const currentCall = streamingToolCalls.value[toolCall.index];
-                if (currentCall) {
-                    if (toolCall.id) {
-                        currentCall.id = toolCall.id;
-                    }
-                    if (toolCall.function?.name) {
-                        currentCall.name = toolCall.function.name;
-                    }
-                    if (toolCall.function?.arguments) {
-                        currentCall.arguments += toolCall.function.arguments;
-                    }
-                }
-            }
-        }
-
-        const finishReason = chunk.choices[0]?.finish_reason;
-        if (finishReason === 'tool_calls') {
-            // tool calls are complete and could be handled here
-            console.log('Tool calls completed:', streamingToolCalls.value);
-            streamingToolCalls.value = [];
-        }
-    }, { once: false });
+    const userMessage = userInput.value.trim();
 
-    bridge.addCommandListener('llm/chat/completions/done', data => {
-        if (data.code !== 200) {
-            handleError(data.msg || '模型服务处理完成但返回错误');
-            return;
-        }
-        if (streamingContent.value) {
-            // append it to the message list
+    loop = new TaskLoop(
+        streamingContent,
+        streamingToolCalls,
+        // onerror
+        (msg) => {
+            ElMessage({
+                message: msg,
+                type: 'error',
+                duration: 3000
+            });
             tabStorage.messages.push({
                 role: 'assistant',
-                content: streamingContent.value
+                content: `错误: ${msg}`
             });
-            streamingContent.value = '';
-        }
-        // if there are tool-call results, append them to the message list as well
-        if (streamingToolCalls.value.length > 0) {
-            streamingToolCalls.value.forEach(tool => {
-                if (tool.id) {
-                    tabStorage.messages.push({
-                        role: 'tool',
-                        tool_call_id: tool.id,
-                        content: tool.arguments
-                    });
-                }
-            });
-            streamingToolCalls.value = [];
-        }
-        isLoading.value = false;
-        chunkHandler();
-    }, { once: true });
-
-    bridge.postMessage({
-        command: 'llm/chat/completions',
-        data: chatData
-    });
+            isLoading.value = false;
+        },
+        // onchunk
+        (chunk) => {
+            scrollToBottom();
+        },
+        // ondone
+        () => {
+            isLoading.value = false;
+            scrollToBottom();
+            loop = undefined;
+        }
+    );
+
+    loop.start(tabStorage, userMessage);
 
     userInput.value = '';
 };
 
 const handleAbort = () => {
-    bridge.postMessage({
-        command: 'llm/chat/completions/abort', // assumes the backend exposes a matching abort command
-        data: {}
-    });
-    isLoading.value = false;
-    streamingContent.value = '';
-    ElMessage.info('请求已中止');
-};
-
-const handleError = (msg: string) => {
-    ElMessage.error(msg);
-    tabStorage.messages.push({
-        role: 'assistant',
-        content: `错误: ${msg}`
-    });
-    streamingContent.value = '';
-    isLoading.value = false;
+    if (loop) {
+        loop.abort();
+        isLoading.value = false;
+        ElMessage.info('请求已中止');
+    }
 };
 
 onMounted(() => {
diff --git a/renderer/src/components/main-panel/chat/setting.vue b/renderer/src/components/main-panel/chat/setting.vue
index f194349..d6c9eb7 100644
--- a/renderer/src/components/main-panel/chat/setting.vue
+++ b/renderer/src/components/main-panel/chat/setting.vue
@@ -3,8 +3,7 @@
-            {{ llms[llmManager.currentModelIndex].name }}/{{
-                llms[llmManager.currentModelIndex].models[selectedModelIndex] }}
+            {{ currentServerName }}/{{ currentModelName }}
@@ -142,6 +141,22 @@ const showTemperatureSlider = ref(false);
 const showContextLengthDialog = ref(false);
 const showSystemPromptDialog = ref(false);
 
+const currentServerName = computed(() => {
+    const currentLlm = llms[llmManager.currentModelIndex];
+    if (currentLlm) {
+        return currentLlm.name;
+    }
+    return '';
+});
+
+const currentModelName = computed(() => {
+    const currentLlm = llms[llmManager.currentModelIndex];
+    if (currentLlm) {
+        return currentLlm.models[selectedModelIndex.value];
+    }
+    return '';
+});
+
 const tab = tabs.content[props.tabId];
 const tabStorage = tab.storage as ChatStorage & { settings: ChatSetting };
 
@@ -157,7 +172,6 @@ if (!tabStorage.settings) {
     } as ChatSetting;
 }
 
-
 const selectedModelIndex = ref(llmManager.currentModelIndex);
 
 const availableModels = computed(() => {
diff --git a/renderer/src/components/main-panel/chat/task-loop.ts b/renderer/src/components/main-panel/chat/task-loop.ts
index 8117178..f0ec9b9 100644
--- a/renderer/src/components/main-panel/chat/task-loop.ts
+++ b/renderer/src/components/main-panel/chat/task-loop.ts
@@ -1,5 +1,6 @@
+/* eslint-disable */
 import { Ref } from "vue";
-import { ToolCall, ChatMessage, ChatStorage, getToolSchema } from "./chat";
+import { ToolCall, ChatStorage, getToolSchema } from "./chat";
 import { useMessageBridge } from "@/api/message-bridge";
 import type { OpenAI } from 'openai';
 import { callTool } from "../tool/tools";
@@ -16,15 +17,15 @@ interface TaskLoopOptions {
  */
 export class TaskLoop {
     private bridge = useMessageBridge();
+    private currentChatId = '';
 
     constructor(
         private readonly streamingContent: Ref,
         private readonly streamingToolCalls: Ref,
-        private readonly messages: ChatMessage[],
-        private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20 },
         private readonly onError: (msg: string) => void = (msg) => {},
         private readonly onChunk: (chunk: ChatCompletionChunk) => void = (chunk) => {},
         private readonly onDone: () => void = () => {},
+        private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20 },
     ) {}
 
     private async handleToolCalls(toolCalls: ToolCall[]) {
@@ -83,9 +84,8 @@ export class TaskLoop {
 
     private doConversation(chatData: ChatCompletionCreateParamsBase) {
-        const bridge = useMessageBridge();
 
         return new Promise((resolve, reject) => {
-            const chunkHandler = bridge.addCommandListener('llm/chat/completions/chunk', data => {
+            const chunkHandler = this.bridge.addCommandListener('llm/chat/completions/chunk', data => {
                 if (data.code !== 200) {
                     this.onError(data.msg || '请求模型服务时发生错误');
                     reject(new Error(data.msg || '请求模型服务时发生错误'));
@@ -100,14 +100,14 @@ export class TaskLoop {
                 this.onChunk(chunk);
             }, { once: false });
 
-            bridge.addCommandListener('llm/chat/completions/done', data => {
+            this.bridge.addCommandListener('llm/chat/completions/done', data => {
                 this.onDone();
                 chunkHandler();
                 resolve();
             }, { once: true });
 
-            bridge.postMessage({
+            this.bridge.postMessage({
                 command: 'llm/chat/completions',
                 data: chatData
             });
@@ -149,14 +149,27 @@ export class TaskLoop {
         return chatData;
     }
 
+    public abort() {
+        this.bridge.postMessage({
+            command: 'llm/chat/completions/abort',
+            data: {
+                id: this.currentChatId
+            }
+        });
+        this.streamingContent.value = '';
+        this.streamingToolCalls.value = [];
+    }
+
     /**
      * @description Start the loop; the DOM is updated asynchronously.
      */
-    public async start(tabStorage: ChatStorage) {
-        // the backend accepts baseURL, apiKey, model, messages, temperature
-
-        while (true) {
+    public async start(tabStorage: ChatStorage, userMessage: string) {
+        // push the new user message first
+        tabStorage.messages.push({ role: 'user', content: userMessage });
+
+        for (let i = 0; i < this.taskOptions.maxEpochs; ++i) {
+            // clear the streaming accumulators before each epoch
             this.streamingContent.value = '';
             this.streamingToolCalls.value = [];
 
@@ -164,6 +177,8 @@ export class TaskLoop {
             // build the chatData payload
             const chatData = this.makeChatData(tabStorage);
 
+            this.currentChatId = chatData.id!;
+
             // send the request
             await this.doConversation(chatData);
 
@@ -179,12 +194,18 @@ export class TaskLoop {
                         content: toolCallResult
                     });
                 }
-
+
             } else if (this.streamingContent.value) {
                 tabStorage.messages.push({
                     role: 'assistant',
                     content: this.streamingContent.value
                 });
+                break;
+
+            } else {
+                // TODO: surface a hint when the reply is empty
+
+                break;
             }
         }
     }
diff --git a/renderer/src/components/main-panel/panel.ts b/renderer/src/components/main-panel/panel.ts
index f0f5cdf..b28551f 100644
--- a/renderer/src/components/main-panel/panel.ts
+++ b/renderer/src/components/main-panel/panel.ts
@@ -34,7 +34,14 @@ export const tabs = reactive<{
     ],
     activeIndex: 0,
     get activeTab() {
-        return this.content[this.activeIndex];
+        return this.content[this.activeIndex] || {
+            name: 'blank',
+            icon: 'icon-blank',
+            type: 'blank',
+            component: undefined,
+            componentIndex: -1,
+            storage: {},
+        };
     }
 });
 
diff --git a/renderer/src/hook/panel.ts b/renderer/src/hook/panel.ts
index e7db425..7e2dbb5 100644
--- a/renderer/src/hook/panel.ts
+++ b/renderer/src/hook/panel.ts
@@ -52,7 +52,7 @@ export function loadPanels() {
     });
 }
 
-let debounceHandler: number;
+let debounceHandler: NodeJS.Timeout;
 
 export function safeSavePanels() {
     clearTimeout(debounceHandler);
diff --git a/renderer/src/views/debug/index.vue b/renderer/src/views/debug/index.vue
index 5df7d76..3a718fe 100644
--- a/renderer/src/views/debug/index.vue
+++ b/renderer/src/views/debug/index.vue
@@ -16,7 +16,7 @@
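
A minimal sketch of how the reworked `TaskLoop` is driven from a caller, assuming the constructor order introduced above (`streamingContent`, `streamingToolCalls`, `onError`, `onChunk`, `onDone`, `taskOptions`) and the `ChatStorage` fields that `makeChatData` reads (`messages`, `settings.temperature`, `settings.enableTools`, `settings.systemPrompt`, `settings.contextLength`). The storage literal and the logging callbacks are illustrative placeholders, not part of this patch.

```typescript
import { ref } from 'vue';
import { TaskLoop } from './task-loop';
import type { ToolCall, ChatStorage } from './chat';

// Reactive sinks that the loop streams partial output into.
const streamingContent = ref('');
const streamingToolCalls = ref<ToolCall[]>([]);

// Hypothetical storage literal; in the app this comes from the tab system.
const tabStorage = {
    messages: [],
    settings: {
        temperature: 0.7,
        enableTools: [],
        systemPrompt: '',
        contextLength: 10,
    },
} as unknown as ChatStorage;

const loop = new TaskLoop(
    streamingContent,
    streamingToolCalls,
    (msg) => console.error('chat failed:', msg),  // onError
    () => { /* e.g. keep the view scrolled */ },  // onChunk
    () => console.log('chat finished'),           // onDone
    { maxEpochs: 20 },                            // taskOptions
);

// start() pushes the user message, then alternates completions and tool
// calls until the model answers with plain content, the reply is empty,
// or maxEpochs runs out; abort() cancels the in-flight request via the
// chat id recorded from makeChatData.
await loop.start(tabStorage, 'hello');
```

With the loop factored out this way, `handleSend` only wires UI state to callbacks, and `handleAbort` can simply delegate to `loop.abort()`.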
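
One note on the `hook/panel.ts` change: `NodeJS.Timeout` type-checks only while Node typings are visible to the renderer build. A platform-neutral alternative is to derive the handle type from `setTimeout` itself, sketched below; the 300 ms interval and the `savePanels` call are assumptions about the surrounding code.

```typescript
// Infers number under DOM typings and Timeout under Node typings.
let debounceHandler: ReturnType<typeof setTimeout>;

declare function savePanels(): void; // assumed existing save routine

export function safeSavePanels() {
    clearTimeout(debounceHandler);
    debounceHandler = setTimeout(() => {
        savePanels();
    }, 300); // debounce interval is illustrative
}
```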