merge & fix tsx launch issue
This commit is contained in:
parent 167c791452
commit 68db65c61b
package-lock.json (generated): 3147 changed lines
File diff suppressed because it is too large
@@ -214,8 +214,7 @@
   },
   "workspaces": [
     "service",
-    "renderer",
-    "software"
+    "renderer"
   ],
   "scripts": {
     "setup": "npm i && npm run prepare:ocr",
@@ -132,6 +132,7 @@ import ConnectInterfaceOpenai from './connect-interface-openai.vue';
 import ConnectTest from './connect-test.vue';
 import { llmSettingRef, makeSimpleTalk, simpleTestResult } from './api';
 import { useMessageBridge } from '@/api/message-bridge';
+import { mcpSetting } from '@/hook/mcp';
 
 defineComponent({ name: 'api' });
 const { t } = useI18n();
@@ -233,11 +234,13 @@ async function updateModels() {
     const llm = llms[llmManager.currentModelIndex];
     const apiKey = llm.userToken;
     const baseURL = llm.baseUrl;
+    const proxyServer = mcpSetting.proxyServer;
 
     const bridge = useMessageBridge();
     const { code, msg } = await bridge.commandRequest('llm/models', {
         apiKey,
-        baseURL
+        baseURL,
+        proxyServer
     });
 
     const isGemini = baseURL.includes('googleapis');
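For reference, the renderer now forwards the proxy address alongside the credentials when listing models. A minimal sketch of the payload shape, using the hypothetical name LlmModelsRequest (the field names themselves come straight from the hunk above):

```ts
// Hypothetical shape of the payload sent over the message bridge with the
// 'llm/models' command; field names match the diff above.
interface LlmModelsRequest {
    apiKey: string;       // llm.userToken
    baseURL: string;      // llm.baseUrl
    proxyServer?: string; // mcpSetting.proxyServer; may be undefined when no proxy is configured
}

const payload: LlmModelsRequest = {
    apiKey: 'sk-example',
    baseURL: 'https://api.openai.com/v1',
    proxyServer: 'http://127.0.0.1:7890',
};
```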
@@ -28,7 +28,7 @@ const { t } = useI18n();
 
 const isGoogle = computed(() => {
     const model = llms[llmManager.currentModelIndex];
-    return model.userModel.startsWith('gemini') || model.baseUrl.includes('googleapis');
+    return model.userModel?.startsWith('gemini') || model.baseUrl.includes('googleapis');
 });
 
 console.log(llms[llmManager.currentModelIndex]);
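The added `?.` presumably guards against entries whose userModel is not set yet; without it the computed property throws instead of falling back to the baseUrl check. A small standalone sketch (the model literal below is illustrative, not the real llms entry):

```ts
const model = { userModel: undefined as string | undefined, baseUrl: 'https://api.openai.com/v1' };

// Before: model.userModel.startsWith('gemini') throws
// "Cannot read properties of undefined (reading 'startsWith')".
// After: optional chaining evaluates to undefined, so the expression falls
// through to the baseUrl check and the computed value is simply false here.
const isGoogle = model.userModel?.startsWith('gemini') || model.baseUrl.includes('googleapis');
console.log(isGoogle); // false
```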
@@ -6,7 +6,7 @@
   "types": "dist/index.d.ts",
   "type": "module",
   "scripts": {
-    "serve": "tsx watch src/main.ts",
+    "serve": "nodemon --watch src --exec tsx src/main.ts",
     "build": "tsc",
     "build:watch": "tsc --watch",
     "postbuild": "node ./scripts/post-build.mjs",
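This serve-script change is presumably the "tsx launch issue" named in the commit message: instead of relying on tsx's built-in watch mode, file watching is delegated to nodemon (added to devDependencies in the next hunk), which simply re-runs `tsx src/main.ts` whenever something under src changes.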
@@ -28,6 +28,7 @@
     "@types/pako": "^2.0.3",
     "@types/ws": "^8.18.0",
     "esbuild": "^0.25.3",
+    "nodemon": "^3.1.10",
     "tsconfig-paths": "^4.2.0",
     "tsx": "^4.19.4",
     "typescript": "^5.8.3",
@@ -5,6 +5,7 @@ import { PostMessageble } from "../hook/adapter.js";
 import { getClient } from "../mcp/connect.service.js";
 import { abortMessageService, streamingChatCompletion } from "./llm.service.js";
 import { OpenAI } from "openai";
+import { axiosFetch } from "src/hook/axios-fetch.js";
 export class LlmController {
 
     @Controller('llm/chat/completions')
@@ -41,9 +42,14 @@
         const {
             baseURL,
             apiKey,
+            proxyServer
         } = data;
 
-        const client = new OpenAI({ apiKey, baseURL });
+
+        const client = new OpenAI({
+            apiKey,
+            baseURL,
+        });
         const models = await client.models.list();
 
         return {
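Within the lines shown, proxyServer is destructured and axiosFetch is imported but neither is used yet; the wiring presumably happens outside this hunk. Purely as a sketch of one way such a proxy could be honored (not necessarily what this repository does, and https-proxy-agent is an assumed extra dependency):

```ts
import { OpenAI } from "openai";
import { HttpsProxyAgent } from "https-proxy-agent"; // assumed dependency, not part of this diff

// Sketch: build an OpenAI client whose requests go through an HTTP(S) proxy
// when proxyServer is set. The real code presumably routes this through the
// imported axiosFetch helper instead; its signature is not visible here.
function createLlmClient(apiKey: string, baseURL: string, proxyServer?: string) {
    return new OpenAI({
        apiKey,
        baseURL,
        // openai v4's ClientOptions accepts a Node http(s) agent for outgoing requests
        httpAgent: proxyServer ? new HttpsProxyAgent(proxyServer) : undefined,
    });
}

// Usage, mirroring the hunk above:
// const client = createLlmClient(apiKey, baseURL, proxyServer);
// const models = await client.models.list();
```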