- {{ llms[llmManager.currentModelIndex].name }}/{{
- llms[llmManager.currentModelIndex].models[selectedModelIndex] }}
+ {{ currentServerName }}/{{ currentModelName }}
@@ -142,6 +141,22 @@ const showTemperatureSlider = ref(false);
const showContextLengthDialog = ref(false);
const showSystemPromptDialog = ref(false);
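+// Guard against a stale currentModelIndex: fall back to '' when no entry matches.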
+const currentServerName = computed(() => {
+ const currentLlm = llms[llmManager.currentModelIndex];
+ if (currentLlm) {
+ return currentLlm.name;
+ }
+ return '';
+});
+
+const currentModelName = computed(() => {
+ const currentLlm = llms[llmManager.currentModelIndex];
+ if (currentLlm) {
+ return currentLlm.models[selectedModelIndex.value] ?? '';
+ }
+ return '';
+});
+
const tab = tabs.content[props.tabId];
const tabStorage = tab.storage as ChatStorage & { settings: ChatSetting };
@@ -157,7 +172,6 @@ if (!tabStorage.settings) {
} as ChatSetting;
}
-
const selectedModelIndex = ref(llmManager.currentModelIndex);
const availableModels = computed(() => {
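Aside: the two computed properties added above repeat the same missing-entry guard. A minimal equivalent using optional chaining, sketched under the assumption that llms is a plain array and selectedModelIndex is the ref declared here, would be:

    const currentServerName = computed(() =>
        llms[llmManager.currentModelIndex]?.name ?? '');
    const currentModelName = computed(() =>
        llms[llmManager.currentModelIndex]?.models[selectedModelIndex.value] ?? '');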
diff --git a/renderer/src/components/main-panel/chat/task-loop.ts b/renderer/src/components/main-panel/chat/task-loop.ts
index 8117178..f0ec9b9 100644
--- a/renderer/src/components/main-panel/chat/task-loop.ts
+++ b/renderer/src/components/main-panel/chat/task-loop.ts
@@ -1,5 +1,6 @@
+/* eslint-disable */
import { Ref } from "vue";
-import { ToolCall, ChatMessage, ChatStorage, getToolSchema } from "./chat";
+import { ToolCall, ChatStorage, getToolSchema } from "./chat";
import { useMessageBridge } from "@/api/message-bridge";
import type { OpenAI } from 'openai';
import { callTool } from "../tool/tools";
@@ -16,15 +17,15 @@ interface TaskLoopOptions {
*/
export class TaskLoop {
private bridge = useMessageBridge();
+ private currentChatId = '';
constructor(
private readonly streamingContent: Ref
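The new currentChatId field is initialized but not yet used in the lines shown. A plausible use, sketched here as an assumption rather than taken from the diff, is to tag each run of the loop so that chunks streamed by a superseded run can be dropped (crypto.randomUUID() is standard in modern browsers and Node 19+):

    class TaskLoopSketch {
        private currentChatId = '';

        start(onChunk: (text: string) => void) {
            // Tag this run; the returned handler ignores chunks from older runs.
            this.currentChatId = crypto.randomUUID();
            const chatId = this.currentChatId;
            return (text: string) => {
                if (chatId !== this.currentChatId) return; // stale stream
                onChunk(text);
            };
        }
    }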