support grok3 & support model update
parent f8447cadb6
commit c252ed0b9f
@@ -1,8 +1,10 @@
# Change Log

## [main] 0.1.4

- Support Google Gemini models.
- Support streaming tool calls for Grok3.
- Reimplement the low-level network layer of the openai protocol, adding support for the full Google Gemini model family.
- Implement an index adapter, adding support for the full Grok3 model family.
- Fix issue#23: the plugin reports "Cannot read properties of undefined (reading 'name')" when creating a connection.
- With an apikey and baseurl filled in, the model list can now be refreshed in one click, so users no longer have to enter it manually.

## [main] 0.1.3

- Fix issue#21: the input box is not cleared after sending text via the button.
@@ -2,7 +2,7 @@
     "name": "openmcp",
     "displayName": "OpenMCP",
     "description": "An all in one MCP Client/TestTool",
-    "version": "0.1.3",
+    "version": "0.1.4",
     "publisher": "kirigaya",
     "author": {
         "name": "kirigaya",
@@ -14,7 +14,8 @@ export enum MessageState {
     ToolCall = 'tool call failed',
     None = 'none',
     Success = 'success',
-    ParseJsonError = 'parse json error'
+    ParseJsonError = 'parse json error',
+    NoToolFunction = 'no tool function',
 }

 export interface IExtraInfo {
@@ -69,15 +70,7 @@ export interface ChatStorage {
     settings: ChatSetting
 }

-export interface ToolCall {
-    id?: string;
-    index?: number;
-    type: string;
-    function: {
-        name: string;
-        arguments: string;
-    }
-}
+export type ToolCall = OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall;

 interface PromptTextItem {
     type: 'prompt'
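For reference, the SDK type that replaces the hand-written interface makes nearly every field optional, which is why later hunks add guards and non-null assertions. A minimal sketch of its shape (field names per the openai npm package; treat the exact optionality as an assumption):

```ts
// Approximate shape of OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall.
// Unlike the old interface, `function` and its members are optional, so every
// consumer must either guard (if (!toolCall.function) ...) or assert.
interface DeltaToolCall {
    index: number;               // position of the call in the parallel-call array
    id?: string;                 // usually present only on the first delta of a call
    type?: 'function';
    function?: {
        name?: string;           // streamed once
        arguments?: string;      // streamed as incremental JSON fragments
    };
}
```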
@@ -1,16 +1,32 @@
 import type { ToolCallContent, ToolCallResponse } from "@/hook/type";
 import { MessageState, type ToolCall } from "../chat-box/chat";
 import { mcpClientAdapter } from "@/views/connect/core";
+import type { BasicLlmDescription } from "@/views/setting/llm";
+import { redLog } from "@/views/setting/util";

 export interface ToolCallResult {
     state: MessageState;
     content: ToolCallContent[];
 }

+export type IToolCallIndex = number;
+
 export async function handleToolCalls(toolCall: ToolCall): Promise<ToolCallResult> {

+    if (!toolCall.function) {
+        return {
+            content: [{
+                type: 'error',
+                text: 'no tool function'
+            }],
+            state: MessageState.NoToolFunction
+        }
+    }
+
     // deserialize the argument string accumulated from streaming
-    const toolName = toolCall.function.name;
-    const argsResult = deserializeToolCallResponse(toolCall.function.arguments);
+    // TODO: check as string
+    const toolName = toolCall.function.name as string;
+    const argsResult = deserializeToolCallResponse(toolCall.function.arguments as string);

     if (argsResult.error) {
         return {
@@ -47,8 +63,7 @@ function deserializeToolCallResponse(toolArgs: string) {
 function handleToolResponse(toolResponse: ToolCallResponse) {
     if (typeof toolResponse === 'string') {
         // a string means it is an error message
-        console.log(toolResponse);
-
+        redLog('error happen' + JSON.stringify(toolResponse));
         return {
             content: [{
@@ -84,3 +99,37 @@ function parseErrorObject(error: any): string {
         return error.toString();
     }
 }
+
+function grokIndexAdapter(toolCall: ToolCall, callId2Index: Map<string, number>): IToolCallIndex {
+    // grok uses the id as the index, so map each id onto a zero-based index
+    if (!toolCall.id) {
+        return 0;
+    }
+    if (!callId2Index.has(toolCall.id)) {
+        callId2Index.set(toolCall.id, callId2Index.size);
+    }
+    return callId2Index.get(toolCall.id)!;
+}
+
+function geminiIndexAdapter(toolCall: ToolCall): IToolCallIndex {
+    // TODO: pending future support
+    return 0;
+}
+
+function defaultIndexAdapter(toolCall: ToolCall): IToolCallIndex {
+    return toolCall.index || 0;
+}
+
+export function getToolCallIndexAdapter(llm: BasicLlmDescription) {
+
+    if (llm.userModel.startsWith('gemini')) {
+        return geminiIndexAdapter;
+    }
+
+    if (llm.userModel.startsWith('grok')) {
+        const callId2Index = new Map<string, number>();
+        return (toolCall: ToolCall) => grokIndexAdapter(toolCall, callId2Index);
+    }
+
+    return defaultIndexAdapter;
+}
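To see why the Grok adapter is needed: Grok streams tool-call deltas that carry a stable `id` rather than a usable numeric `index`, so deltas belonging to the same call must be routed to the same slot. A self-contained sketch of the same mapping (the chunk values are hypothetical, not captured Grok output):

```ts
type DeltaToolCall = { id?: string; index?: number; function?: { name?: string; arguments?: string } };

// Same logic as grokIndexAdapter: the first time an id is seen it gets the
// next zero-based slot; every later delta with that id reuses the slot.
const callId2Index = new Map<string, number>();
function toIndex(tc: DeltaToolCall): number {
    if (!tc.id) return 0;
    if (!callId2Index.has(tc.id)) callId2Index.set(tc.id, callId2Index.size);
    return callId2Index.get(tc.id)!;
}

const chunks: DeltaToolCall[] = [
    { id: 'call_a', function: { name: 'search', arguments: '{"q":' } },
    { id: 'call_a', function: { arguments: '"mcp"}' } },
    { id: 'call_b', function: { name: 'read_file', arguments: '{}' } },
];
console.log(chunks.map(toIndex)); // [0, 0, 1] — deltas for one call share a slot
```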
@@ -3,10 +3,10 @@ import { ref, type Ref } from "vue";
 import { type ToolCall, type ChatStorage, getToolSchema, MessageState } from "../chat-box/chat";
 import { useMessageBridge, MessageBridge, createMessageBridge } from "@/api/message-bridge";
 import type { OpenAI } from 'openai';
-import { llmManager, llms } from "@/views/setting/llm";
+import { llmManager, llms, type BasicLlmDescription } from "@/views/setting/llm";
 import { pinkLog, redLog } from "@/views/setting/util";
 import { ElMessage } from "element-plus";
-import { handleToolCalls, type ToolCallResult } from "./handle-tool-calls";
+import { getToolCallIndexAdapter, handleToolCalls, type IToolCallIndex, type ToolCallResult } from "./handle-tool-calls";
 import { getPlatform } from "@/api/platform";
 import { getSystemPrompt } from "../chat-box/options/system-prompt";
 import { mcpSetting } from "@/hook/mcp";
@@ -45,7 +45,7 @@ export class TaskLoop {
     private onToolCalled: (toolCallResult: ToolCallResult) => ToolCallResult = toolCallResult => toolCallResult;
     private onEpoch: () => void = () => {};
     private completionUsage: ChatCompletionChunk['usage'] | undefined;
-    private llmConfig: any;
+    private llmConfig?: BasicLlmDescription;

     constructor(
         private readonly taskOptions: TaskLoopOptions = { maxEpochs: 20, maxJsonParseRetry: 3, adapter: undefined },
@@ -76,7 +76,7 @@ export class TaskLoop {
         }
     }

-    private handleChunkDeltaToolCalls(chunk: ChatCompletionChunk) {
+    private handleChunkDeltaToolCalls(chunk: ChatCompletionChunk, toolcallIndexAdapter: (toolCall: ToolCall) => IToolCallIndex) {
         const toolCall = chunk.choices[0]?.delta?.tool_calls?.[0];

         if (toolCall) {
@@ -84,7 +84,8 @@ export class TaskLoop {
             console.warn('tool_call.index is undefined or null');
         }

-        const index = toolCall.index || 0;
+        const index = toolcallIndexAdapter(toolCall);
+
         const currentCall = this.streamingToolCalls.value[index];

         if (currentCall === undefined) {
@@ -105,10 +106,10 @@ export class TaskLoop {
             currentCall.id = toolCall.id;
         }
         if (toolCall.function?.name) {
-            currentCall.function.name = toolCall.function.name;
+            currentCall.function!.name = toolCall.function.name;
         }
         if (toolCall.function?.arguments) {
-            currentCall.function.arguments += toolCall.function.arguments;
+            currentCall.function!.arguments += toolCall.function.arguments;
         }
     }
 }
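The non-null assertions above exist because the SDK delta type makes `function` optional. How the merge behaves end to end, as a minimal sketch without Vue reactivity (the delta values are hypothetical):

```ts
type Streaming = { id?: string; function?: { name?: string; arguments?: string } };

const calls: Streaming[] = [];
function merge(delta: Streaming, index: number) {
    // the first delta for a slot creates the record; later deltas fill it in
    const current = (calls[index] ??= { function: { name: '', arguments: '' } });
    if (delta.id) current.id = delta.id;
    if (delta.function?.name) current.function!.name = delta.function.name;
    if (delta.function?.arguments) current.function!.arguments += delta.function.arguments;
}

merge({ id: 'call_a', function: { name: 'search', arguments: '{"q":' } }, 0);
merge({ function: { arguments: '"mcp"}' } }, 0);
console.log(calls[0]);
// { function: { name: 'search', arguments: '{"q":"mcp"}' }, id: 'call_a' }
```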
@@ -123,7 +124,7 @@ export class TaskLoop {
         }
     }

-    private doConversation(chatData: ChatCompletionCreateParamsBase) {
+    private doConversation(chatData: ChatCompletionCreateParamsBase, toolcallIndexAdapter: (toolCall: ToolCall) => IToolCallIndex) {

         return new Promise<IDoConversationResult>((resolve, reject) => {
             const chunkHandler = this.bridge.addCommandListener('llm/chat/completions/chunk', data => {
@@ -134,7 +135,7 @@ export class TaskLoop {

                 // handle incremental content and tool_calls
                 this.handleChunkDeltaContent(chunk);
-                this.handleChunkDeltaToolCalls(chunk);
+                this.handleChunkDeltaToolCalls(chunk, toolcallIndexAdapter);
                 this.handleChunkUsage(chunk);

                 this.onChunk(chunk);
@@ -352,9 +353,11 @@ export class TaskLoop {
         }

         this.currentChatId = chatData.id!;
+        const llm = this.getLlmConfig();
+        const toolcallIndexAdapter = getToolCallIndexAdapter(llm);

         // send the request
-        const doConverationResult = await this.doConversation(chatData);
+        const doConverationResult = await this.doConversation(chatData, toolcallIndexAdapter);

         console.log('[doConverationResult] Response');
         console.log(doConverationResult);
@@ -405,7 +408,7 @@ export class TaskLoop {
             } else if (toolCallResult.state === MessageState.Success) {
                 tabStorage.messages.push({
                     role: 'tool',
-                    index: toolCall.index || 0,
+                    index: toolcallIndexAdapter(toolCall),
                     tool_call_id: toolCall.id || '',
                     content: toolCallResult.content,
                     extraInfo: {
@@ -419,8 +422,8 @@ export class TaskLoop {

                 tabStorage.messages.push({
                     role: 'tool',
-                    index: toolCall.index || 0,
-                    tool_call_id: toolCall.id || toolCall.function.name,
+                    index: toolcallIndexAdapter(toolCall),
+                    tool_call_id: toolCall.id || toolCall.function!.name,
                     content: toolCallResult.content,
                     extraInfo: {
                         created: Date.now(),
@@ -6,7 +6,7 @@
     </span>

     <p>
-        OpenMCP Client 0.1.3 is developed by OpenMCP@<a href="https://www.zhihu.com/people/can-meng-zhong-de-che-xian">锦恢</a>
+        OpenMCP Client 0.1.4 is developed by OpenMCP@<a href="https://www.zhihu.com/people/can-meng-zhong-de-che-xian">锦恢</a>
     </p>

     <p>
@@ -51,10 +51,21 @@
     <div class="setting-save-container">
         <el-button
             id="add-new-server-button"
-            type="success" @click="addNewServer">
+            type="success"
+            @click="addNewServer"
+        >
             {{ t("add-new-server") }}
         </el-button>

+        <el-button
+            id="add-new-server-button"
+            type="success"
+            @click="updateModels"
+            :loading="updateModelLoading"
+        >
+            {{ "Update model list" }}
+        </el-button>
+
         <el-button
             type="primary"
             id="test-llm-button"
@@ -120,6 +131,7 @@ import { pinkLog } from './util';
 import ConnectInterfaceOpenai from './connect-interface-openai.vue';
 import ConnectTest from './connect-test.vue';
 import { llmSettingRef, makeSimpleTalk, simpleTestResult } from './api';
+import { useMessageBridge } from '@/api/message-bridge';

 defineComponent({ name: 'api' });
 const { t } = useI18n();
@@ -212,6 +224,35 @@ function addNewProvider() {
     };
 }

+const updateModelLoading = ref(false);
+
+async function updateModels() {
+    updateModelLoading.value = true;
+
+    const llm = llms[llmManager.currentModelIndex];
+    const apiKey = llm.userToken;
+    const baseURL = llm.baseUrl;
+
+    const bridge = useMessageBridge();
+    const { code, msg } = await bridge.commandRequest('llm/models', {
+        apiKey,
+        baseURL
+    });
+
+    if (code === 200 && Array.isArray(msg)) {
+        const models = msg
+            .filter(item => item.object === 'model')
+            .map(item => item.id);
+
+        llm.models = models;
+        saveLlmSetting();
+    } else {
+        ElMessage.error('Failed to update the model list: ' + msg);
+    }
+    updateModelLoading.value = false;
+}
+
 function updateProvider() {
     if (editingIndex.value < 0) {
         return;
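`updateModels` assumes the bridge hands back the provider's OpenAI-style model list in `msg`. A sketch of that payload and the filter applied to it (the entries are illustrative, not real API output):

```ts
// Entries in the shape of an OpenAI-compatible GET /v1/models response body's
// `data` array; providers may attach extra fields.
interface ModelEntry { id: string; object: string; created?: number; owned_by?: string }

const msg: ModelEntry[] = [
    { id: 'grok-3', object: 'model', owned_by: 'xai' },
    { id: 'grok-3-mini', object: 'model', owned_by: 'xai' },
];

const models = msg.filter(item => item.object === 'model').map(item => item.id);
console.log(models); // ['grok-3', 'grok-3-mini']
```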
@@ -1,4 +1,7 @@
 <template>
+    <div class="extra-info warning" v-if="isGoogle">
+        This model family has poor protocol compatibility; in particular, function calling on gemini-2.0-flash is unstable. For reliable Gemini service, use the newest models whenever possible, or bridge the protocol through newApi.
+    </div>
     <div class="connect-test" v-if="simpleTestResult.done || simpleTestResult.error">
         <div class="test-result">
             <div class="result-item" v-if="simpleTestResult.done">
@@ -18,9 +21,19 @@
 <script setup lang="ts">
 import { useI18n } from 'vue-i18n';
 import { simpleTestResult } from './api';
+import { llmManager, llms } from './llm';
+import { computed } from '@vue/reactivity';

 const { t } = useI18n();

+const isGoogle = computed(() => {
+    const model = llms[llmManager.currentModelIndex];
+    return model.userModel.startsWith('gemini') || model.baseUrl.includes('googleapis');
+});
+
+console.log(llms[llmManager.currentModelIndex]);
+
 </script>

 <style scoped>
@@ -61,4 +74,12 @@ const { t } = useI18n();
 .result-item .iconfont {
     font-size: 16px;
 }
+
+.extra-info.warning {
+    background-color: rgba(230, 162, 60, 0.5);
+    padding: 10px;
+    border-radius: 4px;
+    margin-top: 15px;
+    margin-bottom: 10px;
+}
 </style>
@@ -6,12 +6,25 @@ import type { ToolCall } from '@/components/main-panel/chat/chat-box/chat';
 import I18n from '@/i18n';
 const { t } = I18n.global;

-export const llms = reactive<any[]>([]);
+export const llms = reactive<BasicLlmDescription[]>([]);

 export const llmManager = reactive({
     currentModelIndex: 0,
 });

+export interface BasicLlmDescription {
+    id: string,
+    name: string,
+    baseUrl: string,
+    models: string[],
+    isOpenAICompatible: boolean,
+    description: string,
+    website: string,
+    userToken: string,
+    userModel: string,
+    [key: string]: any
+}
+
 export function createTest(call: ToolCall) {
     const tab = createTab('tool', 0);
     tab.componentIndex = 2;
@@ -21,8 +34,8 @@ export function createTest(call: ToolCall) {

     const storage: ToolStorage = {
         activeNames: [0],
-        currentToolName: call.function.name,
-        formData: JSON.parse(call.function.arguments)
+        currentToolName: call.function!.name!,
+        formData: JSON.parse(call.function!.arguments!)
     };

     tab.storage = storage;
@@ -1,4 +1,4 @@
-import axios, { AxiosProxyConfig } from "axios";
+import axios from "axios";
 import { HttpsProxyAgent } from 'https-proxy-agent';

 interface FetchOptions {
@@ -47,6 +47,30 @@ export const llms = [
         userToken: '',
         userModel: 'doubao-1.5-pro-32k'
     },
+    {
+        id: 'gemini',
+        name: 'Gemini',
+        baseUrl: 'https://generativelanguage.googleapis.com/v1beta/openai/',
+        models: ['gemini-2.0-flash', 'gemini-2.5-flash-preview-05-20', 'gemini-2.5-pro-preview-05-06'],
+        provider: 'google',
+        isOpenAICompatible: true,
+        description: 'Google Gemini',
+        website: 'https://ai.google.dev/gemini-api/docs/models?hl=zh-cn%2F%2Fgemini-2.5-pro-preview-05-06#gemini-2.5-pro-preview-05-06',
+        userToken: '',
+        userModel: 'gemini-2.0-flash'
+    },
+    {
+        id: 'grok',
+        name: 'Grok',
+        baseUrl: 'https://api.x.ai/v1',
+        models: ['grok-3', 'grok-3-fast', 'grok-3-mini', 'grok-3-mini-fast'],
+        provider: 'xai',
+        isOpenAICompatible: true,
+        description: 'xAI Grok',
+        website: 'https://docs.x.ai/docs/models',
+        userToken: '',
+        userModel: 'grok-3-mini'
+    },
     {
         id: 'mistral',
         name: 'Mistral',
@@ -1,7 +1,7 @@
-import { Controller, RequestClientType } from "../common";
+import { OpenAI } from "openai";
+import { Controller } from "../common";
 import { RequestData } from "../common/index.dto";
 import { PostMessageble } from "../hook/adapter";
 import { getClient } from "../mcp/connect.service";
 import { abortMessageService, streamingChatCompletion } from "./llm.service";

 export class LlmController {
@@ -34,4 +34,20 @@ export class LlmController {
         return abortMessageService(data, webview);
     }

+    @Controller('llm/models')
+    async getModels(data: RequestData, webview: PostMessageble) {
+        const {
+            baseURL,
+            apiKey,
+        } = data;
+
+        const client = new OpenAI({ apiKey, baseURL });
+        const models = await client.models.list();
+
+        return {
+            code: 200,
+            msg: models.data
+        }
+    }
 }
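The controller leans on the openai SDK's models endpoint: `client.models.list()` returns a paginated result whose `.data` holds the model entries. A minimal standalone sketch of the same call (the xAI base URL and env var are illustrative):

```ts
import OpenAI from 'openai';

async function listModelIds(apiKey: string, baseURL: string): Promise<string[]> {
    const client = new OpenAI({ apiKey, baseURL });
    // issues GET {baseURL}/models under the hood; works against any
    // OpenAI-compatible provider
    const models = await client.models.list();
    return models.data.map(model => model.id);
}

// e.g. listModelIds(process.env.XAI_API_KEY ?? '', 'https://api.x.ai/v1')
```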
@@ -31,9 +31,8 @@ export async function streamingChatCompletion(

             console.log('openai fetch begin, proxyServer:', proxyServer);

-            if (model.startsWith('gemini')) {
+            if (model.startsWith('gemini') && init) {
                 // damned Google
-                if (init) {
                 init.headers = {
                     'Content-Type': 'application/json',
                     'Authorization': `Bearer ${apiKey}`
@@ -41,9 +40,6 @@ export async function streamingChatCompletion(
                 }

                 return await axiosFetch(input, init, { proxyServer });
-            } else {
-                return await fetch(input, init);
-            }
+            }
+            return await fetch(input, init);
         }
     });
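Taken together, the two hunks above flatten the custom fetch handed to the openai client: Gemini requests get explicit headers and a proxy-aware transport, everything else falls through to plain fetch. A hedged sketch of the pattern, assuming the openai package's `fetch` constructor option; `axiosFetch` is stubbed here, the real proxy-aware helper lives in this repo:

```ts
import OpenAI from 'openai';

// Stand-in for the project's axiosFetch helper, which routes through axios
// with an HTTPS proxy agent; here it just delegates to global fetch.
const axiosFetch = async (input: RequestInfo, init: RequestInit, _opts: { proxyServer?: string }) =>
    fetch(input, init);

const apiKey = process.env.GEMINI_API_KEY ?? '';   // illustrative env var
const model = 'gemini-2.0-flash';
const proxyServer = process.env.HTTPS_PROXY;

const client = new OpenAI({
    apiKey,
    baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
    fetch: async (input, init) => {
        if (model.startsWith('gemini') && init) {
            // Gemini's OpenAI-compatible endpoint wants these headers set explicitly
            init.headers = {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${apiKey}`,
            };
            return await axiosFetch(input, init, { proxyServer });
        }
        return await fetch(input, init);
    },
});
```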
@@ -53,6 +49,9 @@ export async function streamingChatCompletion(

     await postProcessMessages(messages);

+    console.log('seriableTools', seriableTools);
+    console.log('seriableParallelToolCalls', seriableParallelToolCalls);
+
     const stream = await client.chat.completions.create({
         model,
         messages,