// page-assist/src/models/index.ts

import { getModelInfo, isCustomModel } from "@/db/models"
import { ChatChromeAI } from "./ChatChromeAi"
import { ChatOllama } from "./ChatOllama"
import { getOpenAIConfigById } from "@/db/openai"
import { ChatOpenAI } from "@langchain/openai"
export const pageAssistModel = async ({
model,
baseUrl,
keepAlive,
temperature,
topK,
topP,
numCtx,
2024-08-20 16:11:50 +05:30
seed,
numGpu,
numPredict,
2024-06-30 20:45:06 +05:30
}: {
model: string
baseUrl: string
keepAlive?: string
temperature?: number
topK?: number
topP?: number
numCtx?: number
seed?: number
2024-08-20 16:11:50 +05:30
numGpu?: number
numPredict?: number
2024-06-30 20:45:06 +05:30
}) => {
if (model === "chrome::gemini-nano::page-assist") {
return new ChatChromeAI({
temperature,
topK,
})
}
const isCustom = isCustomModel(model)
if (isCustom) {
const modelInfo = await getModelInfo(model)
const providerInfo = await getOpenAIConfigById(modelInfo.provider_id)
return new ChatOpenAI({
modelName: modelInfo.model_id,
openAIApiKey: providerInfo.apiKey || "temp",
temperature,
topP,
maxTokens: numPredict,
configuration: {
apiKey: providerInfo.apiKey || "temp",
baseURL: providerInfo.baseUrl || "",
},
}) as any
2024-06-30 20:45:06 +05:30
}
return new ChatOllama({
baseUrl,
keepAlive,
temperature,
topK,
topP,
numCtx,
seed,
model,
numGpu,
numPredict
})
2024-06-30 20:45:06 +05:30
}