feat: Add support for multiple Ollama providers
Adds support for using Ollama 2 as a model provider. This includes:

- Adding Ollama 2 to the list of supported providers in the UI
- Updating the model identification logic to properly handle Ollama 2 models
- Modifying the model loading and runtime configuration to work with Ollama 2
- Implementing Ollama 2-specific functionality in the embedding and chat models

This change allows users to leverage the capabilities of Ollama 2 for both embeddings and conversational AI tasks.
This commit is contained in:
@@ -1,14 +1,24 @@
|
||||
export const OAI_API_PROVIDERS = [
|
||||
{
|
||||
label: "Custom",
|
||||
value: "custom",
|
||||
baseUrl: ""
|
||||
},
|
||||
{
|
||||
label: "LM Studio",
|
||||
value: "lmstudio",
|
||||
baseUrl: "http://localhost:1234/v1"
|
||||
},
|
||||
{
|
||||
label: "LlamaFile",
|
||||
label: "Llamafile",
|
||||
value: "llamafile",
|
||||
baseUrl: "http://127.0.0.1:8080/v1"
|
||||
},
|
||||
{
|
||||
label: "Ollama",
|
||||
value: "ollama2",
|
||||
baseUrl: "http://localhost:11434/v1"
|
||||
},
|
||||
{
|
||||
label: "OpenAI",
|
||||
value: "openai",
|
||||
@@ -34,9 +44,5 @@ export const OAI_API_PROVIDERS = [
|
||||
value: "openrouter",
|
||||
baseUrl: "https://openrouter.ai/api/v1"
|
||||
},
|
||||
{
|
||||
label: "Custom",
|
||||
value: "custom",
|
||||
baseUrl: ""
|
||||
}
|
||||
|
||||
]
|
||||
Reference in New Issue
Block a user