import { getModelInfo, isCustomModel } from "@/db/models"
import { ChatChromeAI } from "./ChatChromeAi"
import { ChatOllama } from "./ChatOllama"
import { getOpenAIConfigById } from "@/db/openai"
import { ChatOpenAI } from "@langchain/openai"

export const pageAssistModel = async ({
|
|
|
|
|
model,
|
|
|
|
|
baseUrl,
|
|
|
|
|
keepAlive,
|
|
|
|
|
temperature,
|
|
|
|
|
topK,
|
|
|
|
|
topP,
|
|
|
|
|
numCtx,
|
2024-08-20 16:11:50 +05:30
|
|
|
seed,
|
2024-11-09 16:56:47 +05:30
|
|
|
numGpu,
|
|
|
|
|
numPredict,
|
2024-06-30 20:45:06 +05:30
|
|
|
}: {
|
|
|
|
|
model: string
|
|
|
|
|
baseUrl: string
|
2024-08-05 00:49:27 +05:30
|
|
|
keepAlive?: string
|
|
|
|
|
temperature?: number
|
|
|
|
|
topK?: number
|
|
|
|
|
topP?: number
|
|
|
|
|
numCtx?: number
|
|
|
|
|
seed?: number
|
2024-08-20 16:11:50 +05:30
|
|
|
numGpu?: number
|
2024-11-09 16:56:47 +05:30
|
|
|
numPredict?: number
|
2024-06-30 20:45:06 +05:30
|
|
|
}) => {
|
2024-09-29 19:57:26 +05:30
|
|
|
|
|
|
|
|
if (model === "chrome::gemini-nano::page-assist") {
|
|
|
|
|
return new ChatChromeAI({
|
|
|
|
|
temperature,
|
2024-11-09 16:56:47 +05:30
|
|
|
topK,
|
2024-09-29 19:57:26 +05:30
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
const isCustom = isCustomModel(model)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if (isCustom) {
|
|
|
|
|
const modelInfo = await getModelInfo(model)
|
|
|
|
|
const providerInfo = await getOpenAIConfigById(modelInfo.provider_id)
|
2024-10-12 23:32:00 +05:30
|
|
|
|
2024-09-29 19:57:26 +05:30
|
|
|
return new ChatOpenAI({
|
|
|
|
|
modelName: modelInfo.model_id,
|
2024-09-30 10:28:35 +05:30
|
|
|
openAIApiKey: providerInfo.apiKey || "temp",
|
2024-09-29 19:57:26 +05:30
|
|
|
temperature,
|
|
|
|
|
topP,
|
2024-11-09 16:56:47 +05:30
|
|
|
maxTokens: numPredict,
|
2024-09-29 19:57:26 +05:30
|
|
|
configuration: {
|
2024-09-30 10:28:35 +05:30
|
|
|
apiKey: providerInfo.apiKey || "temp",
|
2024-09-29 19:57:26 +05:30
|
|
|
baseURL: providerInfo.baseUrl || "",
|
2024-11-09 15:17:59 +05:30
|
|
|
},
|
2024-09-29 19:57:26 +05:30
|
|
|
}) as any
|
2024-06-30 20:45:06 +05:30
|
|
|
}
|
2024-09-29 19:57:26 +05:30
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
return new ChatOllama({
|
|
|
|
|
baseUrl,
|
|
|
|
|
keepAlive,
|
|
|
|
|
temperature,
|
|
|
|
|
topK,
|
|
|
|
|
topP,
|
|
|
|
|
numCtx,
|
|
|
|
|
seed,
|
|
|
|
|
model,
|
2024-11-09 16:56:47 +05:30
|
|
|
numGpu,
|
|
|
|
|
numPredict
|
2024-09-29 19:57:26 +05:30
|
|
|
})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2024-06-30 20:45:06 +05:30
|
|
|
}
|