Fix a round of bugs

This commit is contained in:
锦恢 2025-05-08 19:33:38 +08:00
parent 0f947c5b09
commit da482e26a9
28 changed files with 311 additions and 78 deletions

View File

@ -1,5 +1,14 @@
# Change Log
## [main] 0.0.8
- More complete error reporting when testing LLM APIs
- Fix bug introduced in 0.0.7: edited conversation messages could not be sent
- Fix bug: pasting text into the rich text editor carried over the source styling
- Fix bug: in the rich text editor, sending content whose leading characters were empty produced an entirely empty message
- Fix bug: during streamed function calling, interleaved index streams from multiple tools caused JSON Schema deserialization failures (see the sketch after this changelog)
- Fix bug: the LLM returned large numbers of duplicate error messages
## [main] 0.0.7
- Optimize the page layout so the debug window can display more content
- Increase the default context length from 10 to 20
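The streamed function-calling fix below (in `task-loop.ts`) keys each partial tool call by its `index` field instead of always overwriting slot 0. A minimal standalone sketch of that index-keyed merge, assuming OpenAI-style delta chunks; the `ToolCallDelta` shape here is illustrative, not the project's exact type:

```typescript
// Illustrative delta shape; the real project uses OpenAI's ChatCompletionChunk types.
interface ToolCallDelta {
    index: number;
    id?: string;
    function?: { name?: string; arguments?: string };
}

interface AccumulatedToolCall {
    index: number;
    id?: string;
    function: { name: string; arguments: string };
}

// Merge streamed deltas into per-index accumulators so two tools whose
// argument fragments arrive interleaved do not corrupt each other's JSON.
function mergeToolCallDeltas(deltas: ToolCallDelta[]): AccumulatedToolCall[] {
    const calls: AccumulatedToolCall[] = [];
    for (const delta of deltas) {
        const current = calls[delta.index];
        if (current === undefined) {
            calls[delta.index] = {
                index: delta.index,
                id: delta.id,
                function: {
                    name: delta.function?.name || '',
                    arguments: delta.function?.arguments || ''
                }
            };
        } else {
            // Argument fragments are appended; the name arrives once at the start.
            current.function.arguments += delta.function?.arguments || '';
        }
    }
    return calls;
}

// Example: fragments for index 0 and index 1 arrive interleaved.
const merged = mergeToolCallDeltas([
    { index: 0, id: 'a', function: { name: 'read_file', arguments: '{"pa' } },
    { index: 1, id: 'b', function: { name: 'list_dir', arguments: '{"di' } },
    { index: 0, function: { arguments: 'th":"a.txt"}' } },
    { index: 1, function: { arguments: 'r":"src"}' } },
]);
console.log(merged.map(c => JSON.parse(c.function.arguments)));
```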

View File

@ -2,7 +2,7 @@
"name": "openmcp",
"displayName": "OpenMCP",
"description": "An all in one MCP Client/TestTool",
"version": "0.0.7",
"version": "0.0.8",
"publisher": "kirigaya",
"author": {
"name": "kirigaya",

View File

@ -1,8 +1,8 @@
@font-face {
font-family: "iconfont"; /* Project id 4870215 */
src: url('iconfont.woff2?t=1746529081655') format('woff2'),
url('iconfont.woff?t=1746529081655') format('woff'),
url('iconfont.ttf?t=1746529081655') format('truetype');
src: url('iconfont.woff2?t=1746703816245') format('woff2'),
url('iconfont.woff?t=1746703816245') format('woff'),
url('iconfont.ttf?t=1746703816245') format('truetype');
}
.iconfont {
@ -13,6 +13,10 @@
-moz-osx-font-smoothing: grayscale;
}
.icon-waiting:before {
content: "\e6d0";
}
.icon-timeout:before {
content: "\edf5";
}

Binary file not shown.

View File

@ -61,6 +61,9 @@ const streamingContent = inject('streamingContent') as Ref<string>;
const streamingToolCalls = inject('streamingToolCalls') as Ref<ToolCall[]>;
const scrollToBottom = inject('scrollToBottom') as () => Promise<void>;
const updateScrollHeight = inject('updateScrollHeight') as () => void;
const chatContext = inject('chatContext') as any;
chatContext.handleSend = handleSend;
function handleSend(newMessage?: string) {
//
@ -77,11 +80,7 @@ function handleSend(newMessage?: string) {
loop.registerOnError((error) => {
ElMessage({
message: error.msg,
type: 'error',
duration: 3000
});
ElMessage.error(error.msg);
if (error.state === MessageState.ReceiveChunkError) {
tabStorage.messages.push({
@ -125,8 +124,6 @@ function handleAbort() {
}
}
provide('handleSend', handleSend);
onMounted(() => {
updateScrollHeight();

View File

@ -10,6 +10,7 @@
class="rich-editor"
:placeholder="placeholder"
@input="handleInput"
@paste="handlePaste"
@keydown.backspace="handleBackspace"
@keydown.enter="handleKeydown"
@compositionstart="handleCompositionStart"
@ -171,6 +172,32 @@ function handleKeydown(event: KeyboardEvent) {
}
}
function handlePaste(event: ClipboardEvent) {
event.preventDefault(); // block the default paste, which would insert styled HTML
const clipboardData = event.clipboardData;
if (clipboardData) {
const pastedText = clipboardData.getData('text/plain');
const editorElement = editor.value;
if (editorElement instanceof HTMLDivElement) {
const selection = window.getSelection();
if (selection && selection.rangeCount > 0) {
const range = selection.getRangeAt(0);
range.deleteContents();
const textNode = document.createTextNode(pastedText);
range.insertNode(textNode);
range.setStartAfter(textNode);
range.collapse(true);
selection.removeAllRanges();
selection.addRange(range);
}
}
}
if (editor.value) {
editor.value.dispatchEvent(new Event('input'));
}
}
function handleCompositionStart() {
isComposing.value = true;
}

View File

@ -12,6 +12,7 @@ export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };
interface TaskLoopOptions {
maxEpochs: number;
maxJsonParseRetry: number;
}
interface IErrorMssage {
@ -19,6 +20,10 @@ interface IErrorMssage {
msg: string
}
interface IDoConversationResult {
stop: boolean;
}
/**
* @description
*/
@ -34,15 +39,19 @@ export class TaskLoop {
private onChunk: (chunk: ChatCompletionChunk) => void = (chunk) => {},
private onDone: () => void = () => {},
private onEpoch: () => void = () => {},
private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20 },
private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20, maxJsonParseRetry: 3 },
) {
}
private async handleToolCalls(toolCalls: ToolCall[]) {
// TODO: call multiple tools and return their results?
const toolCall = toolCalls[0];
console.log('debug toolcall');
console.log(toolCalls);
let toolName: string;
let toolArgs: Record<string, any>;
@ -131,15 +140,15 @@ export class TaskLoop {
if (currentCall === undefined) {
// a new tool call begins
this.streamingToolCalls.value = [{
this.streamingToolCalls.value[toolCall.index] = {
id: toolCall.id,
index: 0,
index: toolCall.index,
type: 'function',
function: {
name: toolCall.function?.name || '',
arguments: toolCall.function?.arguments || ''
}
}];
};
} else {
// accumulate information for the existing tool call
if (currentCall) {
@ -167,16 +176,9 @@ export class TaskLoop {
private doConversation(chatData: ChatCompletionCreateParamsBase) {
return new Promise<void>((resolve, reject) => {
return new Promise<IDoConversationResult>((resolve, reject) => {
const chunkHandler = this.bridge.addCommandListener('llm/chat/completions/chunk', data => {
if (data.code !== 200) {
this.onError({
state: MessageState.ReceiveChunkError,
msg: data.msg || '请求模型服务时发生错误'
});
resolve();
return;
}
// data.code is guaranteed to be 200; otherwise this route is never reached
const { chunk } = data.msg as { chunk: ChatCompletionChunk };
// handle incremental content and tool_calls
@ -187,11 +189,34 @@ export class TaskLoop {
this.onChunk(chunk);
}, { once: false });
this.bridge.addCommandListener('llm/chat/completions/done', data => {
const doneHandler = this.bridge.addCommandListener('llm/chat/completions/done', data => {
this.onDone();
chunkHandler();
errorHandler();
resolve({
stop: false
});
}, { once: true });
console.log('register error handler');
const errorHandler = this.bridge.addCommandListener('llm/chat/completions/error', data => {
console.log('enter error report');
this.onError({
state: MessageState.ReceiveChunkError,
msg: data.msg || '请求模型服务时发生错误'
});
chunkHandler();
doneHandler();
resolve({
stop: true
});
resolve();
}, { once: true });
this.bridge.postMessage({
@ -273,6 +298,10 @@ export class TaskLoop {
this.onEpoch = handler;
}
public setMaxEpochs(maxEpochs: number) {
this.taskOptions.maxEpochs = maxEpochs;
}
/**
* @description DOM
*/
@ -288,6 +317,8 @@ export class TaskLoop {
}
});
let jsonParseErrorRetryCount = 0;
for (let i = 0; i < this.taskOptions.maxEpochs; ++ i) {
this.onEpoch();
@ -308,7 +339,10 @@ export class TaskLoop {
this.currentChatId = chatData.id!;
// send the request
await this.doConversation(chatData);
const doConverationResult = await this.doConversation(chatData);
console.log(doConverationResult);
// if there are tools that need to be dispatched
if (this.streamingToolCalls.value.length > 0) {
@ -333,11 +367,25 @@ export class TaskLoop {
if (toolCallResult.state === MessageState.ParseJsonError) {
// if the failure was a JSON parse error, start over
tabStorage.messages.pop();
redLog('解析 JSON 错误 ' + this.streamingToolCalls.value[0]?.function?.arguments);
continue;
}
jsonParseErrorRetryCount ++;
if (toolCallResult.state === MessageState.Success) {
redLog('解析 JSON 错误 ' + this.streamingToolCalls.value[0]?.function?.arguments);
// if JSON errors have caused too many failures, abort
if (jsonParseErrorRetryCount >= this.taskOptions.maxJsonParseRetry) {
tabStorage.messages.push({
role: 'assistant',
content: `解析 JSON 错误,无法继续调用工具 (累计错误次数 ${this.taskOptions.maxJsonParseRetry})`,
extraInfo: {
created: Date.now(),
state: toolCallResult.state,
serverName: llms[llmManager.currentModelIndex].id || 'unknown',
usage: undefined
}
});
break;
}
} else if (toolCallResult.state === MessageState.Success) {
const toolCall = this.streamingToolCalls.value[0];
tabStorage.messages.push({
@ -351,10 +399,7 @@ export class TaskLoop {
usage: this.completionUsage
}
});
}
if (toolCallResult.state === MessageState.ToolCall) {
} else if (toolCallResult.state === MessageState.ToolCall) {
const toolCall = this.streamingToolCalls.value[0];
tabStorage.messages.push({
@ -385,7 +430,11 @@ export class TaskLoop {
} else {
// show some hints
break;
}
// once the response has been aggregated, use stop to decide whether to break early
if (doConverationResult.stop) {
break;
}
}
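Taken together, the hunks above change the epoch loop so that a JSON parse failure is retried at most `maxJsonParseRetry` times and a transport error (resolved with `stop: true`) ends the loop early. A condensed, illustrative sketch of that control flow only, not the full TaskLoop implementation:

```typescript
// Condensed control-flow sketch; names follow the diff, bodies are stubs.
interface DoConversationResult { stop: boolean; }

async function runEpochs(
    maxEpochs: number,
    maxJsonParseRetry: number,
    doConversation: () => Promise<DoConversationResult>,
    handleToolCalls: () => Promise<'success' | 'parse-json-error' | 'none'>
) {
    let jsonParseErrorRetryCount = 0;
    for (let i = 0; i < maxEpochs; ++i) {
        const result = await doConversation();
        const toolOutcome = await handleToolCalls();
        if (toolOutcome === 'parse-json-error') {
            jsonParseErrorRetryCount++;
            // Give up once JSON errors have piled up past the retry budget.
            if (jsonParseErrorRetryCount >= maxJsonParseRetry) break;
            continue; // otherwise retry this epoch
        }
        // A transport/LLM error resolves with stop: true and ends the loop early.
        if (result.stop) break;
    }
}
```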

View File

@ -31,6 +31,18 @@ export function makeUsageStatistic(extraInfo: IExtraInfo): UsageStatistic | unde
total: usage.prompt_tokens + usage.completion_tokens,
cacheHitRatio: Math.ceil(usage.prompt_tokens_details?.cached_tokens || 0 / usage.prompt_tokens * 1000) / 10,
}
default:
if (usage.prompt_tokens && usage.completion_tokens) {
return {
input: usage.prompt_tokens,
output: usage.completion_tokens,
total: usage.prompt_tokens + usage.completion_tokens,
cacheHitRatio: Math.ceil((usage.prompt_tokens_details?.cached_tokens || 0) / usage.prompt_tokens * 1000) / 10,
}
}
return undefined;
}
return undefined;
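The parenthesization in the new default branch matters: `/` and `*` bind tighter than `||`, so without the parentheses the expression divides the literal `0` and the ratio collapses to `cached_tokens || 0`. A small illustration with hypothetical token counts:

```typescript
// Illustrative values only.
const cached_tokens = 50;
const prompt_tokens = 200;

// Without parentheses: `0 / prompt_tokens * 1000` evaluates first (to 0),
// so the whole expression becomes Math.ceil(cached_tokens) / 10.
const wrong = Math.ceil(cached_tokens || 0 / prompt_tokens * 1000) / 10;   // 5

// With parentheses (as in the new default branch): 50 / 200 * 1000 = 250 -> 25%.
const right = Math.ceil((cached_tokens || 0) / prompt_tokens * 1000) / 10; // 25
console.log(wrong, right);
```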

View File

@ -165,6 +165,11 @@ provide('streamingToolCalls', streamingToolCalls);
provide('isLoading', isLoading);
provide('autoScroll', autoScroll);
const chatContext = {
handleSend: undefined
};
provide('chatContext', chatContext);
// scrollToBottom
async function scrollToBottom() {
if (!scrollbarRef.value || !messageListRef.value) return;
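The `chatContext` object replaces the earlier `provide('handleSend', ...)` call: the parent provides a mutable container at setup time, the chat box assigns `handleSend` onto it later, and any sibling that injects the context sees the current handler through the shared reference. A minimal sketch of the pattern with hypothetical component names, not the project's actual components:

```typescript
// Minimal provide/inject sketch (hypothetical components).
import { defineComponent, h, inject, provide } from 'vue';

interface ChatContext {
    handleSend?: (newMessage?: string) => void;
}

const Child = defineComponent({
    setup() {
        const chatContext = inject('chatContext') as ChatContext;
        // Assign the handler after injection; the parent's object is shared by reference.
        chatContext.handleSend = (msg) => console.log('sending', msg);
        return () => h('div');
    }
});

const Parent = defineComponent({
    setup() {
        const chatContext: ChatContext = { handleSend: undefined };
        provide('chatContext', chatContext);
        return () => h(Child);
    }
});

export default Parent;
```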

View File

@ -1,13 +1,13 @@
<template>
<div class="message-role">Agent</div>
<div class="message-text">
<div v-if="message.content" v-html="markdownToHtml(props.message.content)"></div>
<div v-if="message.content" v-html="markdownToHtml(messageContent)"></div>
</div>
<MessageMeta :message="props.message" />
</template>
<script setup lang="ts">
import { defineProps } from 'vue';
import { computed, defineProps } from 'vue';
import { markdownToHtml } from '@/components/main-panel/chat/markdown/markdown';
import MessageMeta from './message-meta.vue';
@ -23,6 +23,17 @@ const props = defineProps({
}
});
const messageContent = computed(() => {
if (typeof props.message.content === 'undefined') {
return 'undefined';
}
if (typeof props.message.content === 'object') {
return JSON.stringify(props.message.content, null, 2);
}
return props.message.content.toString();
});
</script>
<style>

View File

@ -1,5 +1,5 @@
<template>
<div class="message-avatar">
<div class="message-avatar streaming-box">
<span class="iconfont icon-chat"></span>
</div>
<div class="message-content">
@ -11,7 +11,7 @@
</span>
</span>
</div>
<div class="message-text">
<div class="message-text streaming-box">
<span v-html="waitingMarkdownToHtml(streamingContent)"></span>
</div>
</div>

View File

@ -38,11 +38,11 @@
<div class="tool-call-header result">
<span class="tool-name">
<span :class="`iconfont icon-${currentMessageLevel}`"></span>
{{ isValid ? '响应': '错误' }}
{{ isValid ? t("response") : t('error') }}
<el-button v-if="!isValid" size="small"
@click="gotoIssue()"
>
反馈
{{ t('feedback') }}
</el-button>
</span>
<span style="width: 200px;" class="tools-dialog-container" v-if="currentMessageLevel === 'info'">
@ -82,7 +82,24 @@
</div>
</div>
</div>
<div v-else style="width: 90%">
<div class="tool-call-header result">
<span class="tool-name">
<span :class="`iconfont icon-waiting`"></span>
{{ t('waiting-mcp-server') }}
</span>
</div>
<div class="tool-result-content">
<div class="progress">
<el-progress
:percentage="100"
:format="() => ''"
:indeterminate="true"
text-inside
/>
</div>
</div>
</div>
<MessageMeta :message="message" />
@ -93,7 +110,8 @@
</template>
<script setup lang="ts">
import { defineProps, ref, watch, PropType, computed, defineEmits } from 'vue';
import { defineProps, ref, watch, PropType, computed, defineEmits, inject, Ref } from 'vue';
import { useI18n } from 'vue-i18n';
import MessageMeta from './message-meta.vue';
import { markdownToHtml } from '@/components/main-panel/chat/markdown/markdown';
@ -103,6 +121,8 @@ import { ToolCallContent } from '@/hook/type';
import ToolcallResultItem from './toolcall-result-item.vue';
const { t } = useI18n();
const props = defineProps({
message: {
type: Object as PropType<IRenderMessage>,
@ -183,7 +203,14 @@ const isValid = computed(() => {
}
});
const currentMessageLevel = computed(() => {
// the MCP server has not returned a result yet, so show the waiting/info state
if (!props.message.toolResult) {
return 'info';
}
if (!isValid.value) {
return 'error';
}
@ -223,6 +250,16 @@ function updateToolCallResultItem(value: any, index: number) {
padding: 3px 10px;
}
.tool-result-content .el-progress-bar__outer {
}
.tool-result-content .progress {
border-radius: .5em;
background-color: var(--el-fill-color-light) !important;
padding: 20px 10px;
width: 50%;
}
.message-text.tool_calls.warning {
border: 1px solid var(--el-color-warning);
}

View File

@ -34,7 +34,7 @@
<script setup lang="ts">
import { defineProps, ref, PropType, inject } from 'vue';
import { tabs } from '../../panel';
import { ChatStorage, IRenderMessage } from '../chat';
import type { ChatStorage, IRenderMessage } from '../chat-box/chat';
import KCuteTextarea from '@/components/k-cute-textarea/index.vue';
import { ElMessage } from 'element-plus';
@ -58,7 +58,7 @@ const tabStorage = tab.storage as ChatStorage;
const isEditing = ref(false);
const userInput = ref('');
const handleSend = inject<(newMessage: string | undefined) => void>('handleSend');
const chatContext = inject('chatContext') as any;
const toggleEdit = () => {
isEditing.value = !isEditing.value;
@ -70,10 +70,12 @@ const toggleEdit = () => {
const handleKeydown = (event: KeyboardEvent) => {
const index = tabStorage.messages.findIndex(msg => msg.extraInfo === props.message.extraInfo);
if (index !== -1 && handleSend) {
console.log(chatContext);
if (index !== -1 && chatContext.handleSend) {
// remove the message at index and everything after it
tabStorage.messages.splice(index);
handleSend(userInput.value);
chatContext.handleSend(userInput.value);
isEditing.value = false;
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "اختر الإعداد المسبق",
"cwd": "دليل التنفيذ",
"mcp-server-timeout": "أطول وقت لاستدعاء أداة MCP",
"return": "عودة"
"return": "عودة",
"error": "خطأ",
"feedback": "تعليقات",
"waiting-mcp-server": "في انتظار استجابة خادم MCP"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "Voreinstellung auswählen",
"cwd": "Ausführungsverzeichnis",
"mcp-server-timeout": "Maximale Aufrufzeit des MCP-Tools",
"return": "Zurück"
"return": "Zurück",
"error": "Fehler",
"feedback": "Feedback",
"waiting-mcp-server": "Warten auf Antwort vom MCP-Server"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "Select preset",
"cwd": "Execution directory",
"mcp-server-timeout": "Maximum call time of MCP tool",
"return": "Back"
"return": "Back",
"error": "Error",
"feedback": "Feedback",
"waiting-mcp-server": "Waiting for MCP server response"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "Sélectionner un préréglage",
"cwd": "Répertoire d'exécution",
"mcp-server-timeout": "Temps d'appel maximum de l'outil MCP",
"return": "Retour"
"return": "Retour",
"error": "Erreur",
"feedback": "Retour",
"waiting-mcp-server": "En attente de la réponse du serveur MCP"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "プリセットを選択",
"cwd": "実行ディレクトリ",
"mcp-server-timeout": "MCPツールの最大呼び出し時間",
"return": "戻る"
"return": "戻る",
"error": "エラー",
"feedback": "フィードバック",
"waiting-mcp-server": "MCPサーバーの応答を待機中"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "프리셋 선택",
"cwd": "실행 디렉터리",
"mcp-server-timeout": "MCP 도구 최대 호출 시간",
"return": "돌아가기"
"return": "돌아가기",
"error": "오류",
"feedback": "피드백",
"waiting-mcp-server": "MCP 서버 응답 대기 중"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "Выбрать预设",
"cwd": "Каталог выполнения",
"mcp-server-timeout": "Максимальное время вызова инструмента MCP",
"return": "Назад"
"return": "Назад",
"error": "Ошибка",
"feedback": "Обратная связь",
"waiting-mcp-server": "Ожидание ответа от сервера MCP"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "选择预设",
"cwd": "执行目录",
"mcp-server-timeout": "MCP工具最长调用时间",
"return": "返回"
"return": "返回",
"error": "错误",
"feedback": "反馈",
"waiting-mcp-server": "等待 MCP 服务器响应"
}

View File

@ -152,5 +152,8 @@
"choose-presetting": "選擇預設",
"cwd": "執行目錄",
"mcp-server-timeout": "MCP工具最長調用時間",
"return": "返回"
"return": "返回",
"error": "錯誤",
"feedback": "反饋",
"waiting-mcp-server": "等待MCP伺服器響應"
}

View File

@ -6,7 +6,7 @@
</span>
<p>
OpenMCP Client 0.0.7 OpenMCP@<a href="https://www.zhihu.com/people/can-meng-zhong-de-che-xian">锦恢</a> 开发
OpenMCP Client 0.0.8 OpenMCP@<a href="https://www.zhihu.com/people/can-meng-zhong-de-che-xian">锦恢</a> 开发
</p>
<p>

View File

@ -2,22 +2,26 @@ import { ChatStorage } from '@/components/main-panel/chat/chat-box/chat';
import { TaskLoop } from '@/components/main-panel/chat/core/task-loop';
import { llmManager } from './llm';
import { reactive, ref } from 'vue';
import { makeUsageStatistic } from '@/components/main-panel/chat/core/usage';
export const llmSettingRef = ref<any>(null);
export const simpleTestResult = reactive<{
done: boolean,
start: boolean,
error: any
error: any,
tps: string | number | undefined
}>({
done: false,
start: false,
error: '',
tps: undefined
});
export function makeSimpleTalk() {
export async function makeSimpleTalk() {
simpleTestResult.done = false;
simpleTestResult.start = true;
simpleTestResult.tps = undefined;
// use the simplest possible "hello" message as the test
const testMessage = 'hello';
@ -38,18 +42,35 @@ export function makeSimpleTalk() {
}
};
loop.setMaxEpochs(1);
loop.registerOnDone(() => {
console.log('done');
simpleTestResult.error = '';
simpleTestResult.done = true;
simpleTestResult.start = false;
});
loop.registerOnError(error => {
console.log(error);
simpleTestResult.error = error;
const errorReason = error.msg;
const errorText = JSON.stringify(errorReason);
simpleTestResult.error = errorText;
simpleTestResult.start = false;
});
loop.start(chatStorage, testMessage);
const startTime = performance.now();
await loop.start(chatStorage, testMessage);
const costTime = (performance.now() - startTime!) / 1000;
const message = chatStorage.messages.at(-1);
console.log(chatStorage.messages);
if (message?.extraInfo) {
const usage = message.extraInfo.usage;
if (usage?.prompt_tokens && usage.completion_tokens) {
const total = usage?.prompt_tokens + usage?.completion_tokens;
simpleTestResult.tps = (total / costTime).toFixed(2);
}
}
}
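The throughput figure is simply total tokens divided by wall-clock seconds measured around `loop.start`. A worked example with hypothetical numbers:

```typescript
// Hypothetical usage numbers for illustration.
const usage = { prompt_tokens: 30, completion_tokens: 120 };
const costTime = 3.2; // seconds, from performance.now() before/after the test
const tps = ((usage.prompt_tokens + usage.completion_tokens) / costTime).toFixed(2);
console.log(tps); // "46.88"
```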

View File

@ -38,7 +38,7 @@
</div>
</div>
<!-- Show different endpoints for different models -->
<!-- TODO: show different endpoints for different models -->
<div v-if="false">
</div>
@ -80,7 +80,7 @@
<ConnectTest />
<!-- chat box for the current page -->
<el-dialog v-model="dialogVisible" width="50%" style="min-width: 500px; max-width: 800px;padding: 20px;">
<el-dialog v-model="dialogVisible" width="50%" class="api-man-dialog">
<br>
@ -286,6 +286,17 @@ function handleCommand(command: {type: string, index: number}) {
</script>
<style>
.api-man-dialog {
min-width: 500px;
max-width: 800px;
padding: 20px;
}
.api-man-dialog .el-tag {
background-color: var(--main-light-color) !important;
}
.setting-save-container {
margin: 5px;
}

View File

@ -2,22 +2,25 @@
<div class="connect-test" v-if="simpleTestResult.done || simpleTestResult.error">
<div class="test-result">
<div class="result-item" v-if="simpleTestResult.done">
<span class="iconfont icon-success"></span>
<span>{{ "✅ okey dockey :D" }}</span>
<span class="iconfont icon-dui"></span>
<span>{{ " okey dockey :D" }}</span>
<span v-if="simpleTestResult.tps" class="tps">{{ simpleTestResult.tps }} token/s</span>
<span v-else class="tps">{{ t("server-not-support-statistic") }}</span>
</div>
<div class="result-item error" v-if="simpleTestResult.error">
<span class="iconfont icon-error"></span>
<span>{{ ' ' + simpleTestResult.error }}</span>
<span class="iconfont icon-cuo"></span>
<span>{{ ' ' + simpleTestResult.error }}</span>
</div>
</div>
</div>
</template>
<script setup lang="ts">
import { defineComponent } from 'vue';
import { useI18n } from 'vue-i18n';
import { simpleTestResult } from './api';
defineComponent({ name: 'connect-test' });
const { t } = useI18n();
</script>
<style scoped>
@ -43,6 +46,14 @@ defineComponent({ name: 'connect-test' });
border-radius: 4px;
}
.connect-test .tps {
margin-left: 5px;
color: var(--foreground);
background-color: var(--el-fill-color-light);
padding: 2px 6px;
border-radius: 4px;
}
.result-item.error {
color: var(--el-color-danger);
}

View File

@ -13,7 +13,20 @@ export class LlmController {
};
}
await streamingChatCompletion(data, webview);
try {
await streamingChatCompletion(data, webview);
} catch (error) {
console.log('error' + error);
webview.postMessage({
command: 'llm/chat/completions/error',
data: {
msg: error
}
});
}
return {
code: -1,
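On the extension side the controller now catches failures from `streamingChatCompletion` and forwards them on the `llm/chat/completions/error` channel, which the renderer's `errorHandler` above listens for. A minimal sketch of the message shape both sides assume; the webview stub is illustrative, the real object comes from the host bridge:

```typescript
// Illustrative webview stub; only the message shape matters here.
interface WebviewLike {
    postMessage(message: { command: string; data: { msg: unknown } }): void;
}

async function runChatCompletion(
    webview: WebviewLike,
    streamingChatCompletion: () => Promise<void>
) {
    try {
        await streamingChatCompletion();
    } catch (error) {
        // Mirror the controller: surface the failure on the dedicated error
        // channel instead of letting it escape as an unhandled rejection.
        webview.postMessage({
            command: 'llm/chat/completions/error',
            data: { msg: error }
        });
    }
}
```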

View File

@ -58,9 +58,6 @@ export async function streamingChatCompletion(
break;
}
console.log(chunk);
if (chunk.choices) {
const chunkResult = {
code: 200,