Add a reminder when no token has been filled in

This commit is contained in:
锦恢 2025-05-02 23:39:13 +08:00
parent b7015d7532
commit 73a5b05a5d
5 changed files with 89 additions and 22 deletions

View File

@@ -24,6 +24,8 @@ import { getPlatform } from './api/platform';
import Tour from '@/components/guide/tour.vue';
import { userHasReadGuide } from './components/guide/tour';
import { ElLoading } from 'element-plus';
const bridge = useMessageBridge();
//
@@ -36,6 +38,13 @@ const route = useRoute();
const router = useRouter();
onMounted(async () => {
const loading = ElLoading.service({
fullscreen: true,
lock: true,
text: 'Loading',
background: 'rgba(0, 0, 0, 0.7)'
});
// css
setDefaultCss();
@@ -78,6 +87,7 @@ onMounted(async () => {
// loading panels
await loadPanels();
loading.close();
});
</script>
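A minimal sketch (not part of this commit) of the same mounted hook with the overlay closed in a finally block, so the fullscreen loading is also dismissed if loadPanels or another awaited step throws; setDefaultCss and loadPanels are the functions already used in the diff above.

import { onMounted } from 'vue';
import { ElLoading } from 'element-plus';

onMounted(async () => {
    // ElLoading.service returns a handle whose close() hides the fullscreen overlay
    const loading = ElLoading.service({
        fullscreen: true,
        lock: true,
        text: 'Loading',
        background: 'rgba(0, 0, 0, 0.7)'
    });
    try {
        setDefaultCss();
        await loadPanels();
    } finally {
        loading.close();
    }
});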

View File

@@ -1,18 +1,41 @@
-import { ToolItem } from "@/hook/type";
-import { ref } from "vue";
+import { ToolCallContent, ToolItem } from "@/hook/type";
+import { Ref, ref } from "vue";
import type { OpenAI } from 'openai';
type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
-export interface IExtraInfo {
-created: number,
-serverName: string,
-usage?: ChatCompletionChunk['usage'];
-[key: string]: any
export enum MessageState {
ServerError = 'server internal error',
ReceiveChunkError = 'receive chunk error',
Timeout = 'timeout',
MaxEpochs = 'max epochs',
Unknown = 'unknown error',
Abort = 'abort',
ToolCall = 'tool call failed',
None = 'none',
Success = 'success',
ParseJsonError = 'parse json error'
}
-export interface ChatMessage {
-role: 'user' | 'assistant' | 'system' | 'tool';
export interface IExtraInfo {
created: number,
state: MessageState,
serverName: string,
usage?: ChatCompletionChunk['usage'];
[key: string]: any;
}
export interface ToolMessage {
role: 'tool';
content: ToolCallContent[];
tool_call_id?: string
name?: string // tool name, used when role is 'tool'
tool_calls?: ToolCall[],
extraInfo: IExtraInfo
}
export interface TextMessage {
role: 'user' | 'assistant' | 'system';
content: string;
tool_call_id?: string
name?: string // tool name, used when role is 'tool'
@@ -20,6 +43,8 @@ export interface ChatMessage {
extraInfo: IExtraInfo
}
export type ChatMessage = ToolMessage | TextMessage;
// newly added state and tool data
interface EnableToolItem {
name: string;
@@ -53,6 +78,15 @@ export interface ToolCall {
export const allTools = ref<ToolItem[]>([]);
export interface IRenderMessage {
role: 'user' | 'assistant/content' | 'assistant/tool_calls' | 'tool';
content: string;
toolResult?: ToolCallContent[];
tool_calls?: ToolCall[];
showJson?: Ref<boolean>;
extraInfo: IExtraInfo;
}
export function getToolSchema(enableTools: EnableToolItem[]) {
const toolsSchema = [];
for (let i = 0; i < enableTools.length; i++) {
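A small sketch (not from this commit; getPlainText is a made-up helper) showing how the new ChatMessage union can be narrowed on role, which is the point of splitting the old interface into ToolMessage and TextMessage: content is ToolCallContent[] only when role is 'tool', and a plain string otherwise.

function getPlainText(message: ChatMessage): string {
    if (message.role === 'tool') {
        // narrowed to ToolMessage: content is ToolCallContent[]
        return message.content.map(part => JSON.stringify(part)).join('\n');
    }
    // narrowed to TextMessage: content is a plain string
    return message.content;
}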

View File

@@ -6,6 +6,7 @@ import type { OpenAI } from 'openai';
import { callTool } from "../tool/tools";
import { llmManager, llms } from "@/views/setting/llm";
import { pinkLog, redLog } from "@/views/setting/util";
import { ElMessage } from "element-plus";
export type ChatCompletionChunk = OpenAI.Chat.Completions.ChatCompletionChunk;
export type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams & { id?: string };
@@ -200,9 +201,19 @@ export class TaskLoop {
});
}
-public makeChatData(tabStorage: ChatStorage): ChatCompletionCreateParamsBase {
+public makeChatData(tabStorage: ChatStorage): ChatCompletionCreateParamsBase | undefined {
const baseURL = llms[llmManager.currentModelIndex].baseUrl;
-const apiKey = llms[llmManager.currentModelIndex].userToken;
+const apiKey = llms[llmManager.currentModelIndex].userToken || '';
if (apiKey.trim() === '') {
if (tabStorage.messages.length > 0 && tabStorage.messages[tabStorage.messages.length - 1].role === 'user') {
tabStorage.messages.pop();
ElMessage.error('请先设置 API Key');
}
return undefined;
}
const model = llms[llmManager.currentModelIndex].userModel;
const temperature = tabStorage.settings.temperature;
const tools = getToolSchema(tabStorage.settings.enableTools);
@@ -289,6 +300,11 @@
// construct chatData
const chatData = this.makeChatData(tabStorage);
if (!chatData) {
this.onDone();
break;
}
this.currentChatId = chatData.id!;
// send the request
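As a usage note, a hedged sketch (hasUsableToken is a hypothetical helper, not in the repository) of checking the token up front with the same llms / llmManager globals that makeChatData reads, so a caller could disable sending instead of popping the user message afterwards.

import { llmManager, llms } from "@/views/setting/llm";

// hypothetical helper: true when the currently selected model has a non-empty token
export function hasUsableToken(): boolean {
    const apiKey = llms[llmManager.currentModelIndex]?.userToken || '';
    return apiKey.trim() !== '';
}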

View File

@@ -58,6 +58,9 @@ export async function streamingChatCompletion(
break;
}
console.log(chunk);
if (chunk.choices) {
const chunkResult = {
code: 200,

View File

@@ -1,5 +1,5 @@
import { getConnectionConfig, IConnectionItem, panels, saveConnectionConfig, getFirstValidPathFromCommand } from "../global";
import { exec, spawn } from 'node:child_process';
import * as vscode from 'vscode';
export async function deleteInstalledConnection(item: IConnectionItem) {
@@ -35,16 +35,18 @@ export async function deleteInstalledConnection(item: IConnectionItem) {
}
}
-export async function validateAndGetCommandPath(command: string, cwd?: string): Promise<string> {
-const { exec } = require('child_process');
-const { promisify } = require('util');
-const execAsync = promisify(exec);
+export async function validateAndGetCommandPath(commandString: string, cwd?: string): Promise<string> {
try {
-const { stdout } = await execAsync(`which ${command.split(' ')[0]}`, { cwd });
-return stdout.trim();
+const commands = commandString.split(' ');
+const command = commands[0];
+const args = commands.slice(1);
+const process = spawn(command, args || [], { shell: true, cwd });
+process.disconnect();
+return '';
} catch (error) {
-throw new Error(`无法找到命令: ${command.split(' ')[0]}`);
+console.log(error);
+throw new Error(`无法找到命令: ${commandString.split(' ')[0]}`);
}
}
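A hedged alternative sketch (not part of this commit; checkCommandExists is a made-up name): when spawn is called without shell: true, Node emits an 'error' event with ENOENT if the executable cannot be found, so the existence check can await the 'spawn'/'error' events instead of calling disconnect(), which only applies to children started with an IPC channel.

import { spawn } from 'node:child_process';

export function checkCommandExists(commandString: string, cwd?: string): Promise<void> {
    const [command, ...args] = commandString.split(' ');
    return new Promise((resolve, reject) => {
        const child = spawn(command, args, { cwd, stdio: 'ignore' });
        // 'error' fires (e.g. ENOENT) when the executable is missing; 'spawn' fires once it starts
        child.once('error', error => reject(error));
        child.once('spawn', () => {
            child.kill();
            resolve();
        });
    });
}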
@@ -89,6 +91,8 @@ export async function acquireInstalledConnection(): Promise<IConnectionItem | un
const command = commands[0];
const args = commands.slice(1);
console.log('Command:', command);
const filePath = await getFirstValidPathFromCommand(commandString, cwd || '');
// save the connection configuration