Test Open-WebUI with full Nix, Podman, and Docker.
In the end, a Docker Compose setup is easier (wait for 25.04, maybe).
This commit is contained in:
parent
aec8b7a986
commit
22697d1913
11 changed files with 639 additions and 100 deletions
136
hosts/open-webui/docker.nix
Normal file
136
hosts/open-webui/docker.nix
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
# NixOS module for the open-webui host: enables Docker-based virtualisation,
# common utilities, and Proxmox LXC support, and wires in the open-webui age secret.
{ config, pkgs, ... }:

let
  # LiteLLM proxy configuration (Azure OpenAI / Azure AI models behind named
  # credentials).
  # NOTE(review): `litellmSettings` is bound here but not referenced anywhere
  # in the module body below — presumably consumed by a container definition
  # elsewhere or intended for a follow-up change; confirm before removing.
  litellmSettings = {
    general_settings = {
      proxy_batch_write_at = 60; # Batch write spend updates every 60s
      database_connection_pool_limit = 10; # Limit the number of database connections
      disable_spend_logs = true; # Turn off writing each transaction to the DB
      disable_error_logs = true; # Turn off writing LLM exceptions to DB
      allow_requests_on_db_unavailable = true; # Allow requests if DB is unavailable
    };

    environment_variables = {
      LITELLM_MODE = "Production";
    };

    # Named credential sets; the "os.environ/..." values are LiteLLM's syntax
    # for reading the real secrets from environment variables at runtime.
    credential_list = [
      {
        credential_name = "dp_azure_openai_credential";
        credential_values = {
          api_base = "os.environ/AZURE_API_BASE_OPENAI";
          api_key = "os.environ/AZURE_API_KEY_OPENAI";
        };
        credential_info = {
          description = "Azure OpenAI credentials for DP";
        };
      }
      {
        credential_name = "dp_azure_ai_credential";
        credential_values = {
          api_base = "os.environ/AZURE_API_BASE_AI";
          api_key = "os.environ/AZURE_API_KEY_AI";
        };
        credential_info = {
          description = "Azure AI credentials for DP";
        };
      }
    ];

    # Models exposed by the proxy; each maps a display name to an Azure
    # deployment and the credential set used to reach it.
    model_list = [
      {
        model_name = "text-embedding-3-large";
        litellm_params = {
          model = "azure/text-embedding-3-large";
          litellm_credential_name = "dp_azure_openai_credential";
        };
        model_info = {
          mode = "embedding";
        };
      }
      {
        model_name = "text-embedding-3-small";
        litellm_params = {
          model = "azure/text-embedding-3-small";
          litellm_credential_name = "dp_azure_openai_credential";
        };
        model_info = {
          mode = "embedding";
        };
      }
      {
        model_name = "GPT-3.5 Turbo";
        litellm_params = {
          model = "azure/gpt-35-turbo";
          litellm_credential_name = "dp_azure_openai_credential";
        };
      }
      {
        model_name = "GPT-4o";
        litellm_params = {
          model = "azure/gpt-4o";
          litellm_credential_name = "dp_azure_openai_credential";
        };
      }
      {
        model_name = "GPT o3 Mini";
        litellm_params = {
          model = "azure/o3-mini";
          litellm_credential_name = "dp_azure_openai_credential";
        };
      }
      {
        model_name = "GPT-4o Mini";
        litellm_params = {
          model = "azure/gpt-4o-mini";
          litellm_credential_name = "dp_azure_openai_credential";
        };
      }
      {
        model_name = "Dall-e 3";
        litellm_params = {
          model = "azure/dall-e-3";
          litellm_credential_name = "dp_azure_openai_credential";
        };
        model_info = {
          mode = "image_generation";
        };
      }
      {
        model_name = "azure-openai-4o-audio";
        litellm_params = {
          litellm_credential_name = "dp_azure_openai_credential";
          model = "azure/gpt-4o-audio-preview";
        };
      }
      {
        model_name = "DeepSeek-R1";
        litellm_params = {
          litellm_credential_name = "dp_azure_ai_credential";
          model = "azure_ai/deepseek-r1";
        };
      }
    ];
  };
in
{

  # Decrypt the open-webui age secret onto this host.
  age.secrets = {
    open-webui.file = ../../secrets/open-webui.age;
  };

  # Project-local option namespace (`my.*`) — options declared elsewhere in
  # this repository.
  my = {

    virtualisation.docker.enable = true;

    utils = {
      commons.enable = true;
      commons.gc.enable = true;
      lxc-standard.enable = true;
    };

    virtualisation.proxmox.enable = true;
  };

  # Pin the NixOS state version this host was first installed with.
  system.stateVersion = "24.11";
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue