Test Open-WebUI with full Nix, Podman, and Docker

In the end, a Docker Compose setup is easier (maybe wait for 25.04)
This commit is contained in:
pazpi 2025-05-24 18:10:47 +02:00
parent aec8b7a986
commit 22697d1913
11 changed files with 639 additions and 100 deletions

View file

@ -173,7 +173,12 @@ in
}
{
subdomain = "ai";
host = "http://${p.hosts.portainer}:4000";
host = "http://${p.hosts.portainer}:4080";
domain = p.domains.public;
}
{
subdomain = "keep";
host = "http://${p.hosts.portainer}:3000";
domain = p.domains.public;
}
];

View file

@ -249,7 +249,7 @@ in
modules = [
myModules
proxmoxModule
./open-webui
./open-webui/docker.nix
agenix.nixosModules.default
];
# specialArgs = { };

View file

@ -1,22 +1,157 @@
{
config,
pkgs,
lib,
...
}:
{ config, pkgs, ... }:
let
p = import ../parameters.nix;
litellm-port = 12345;
litellmSettings = {
general_settings = {
proxy_batch_write_at = 60; # Batch write spend updates every 60s
database_connection_pool_limit = 10; # Limit the number of database connections
disable_spend_logs = true; # Turn off writing each transaction to the DB
disable_error_logs = true; # Turn off writing LLM exceptions to DB
allow_requests_on_db_unavailable = true; # Allow requests if DB is unavailable
};
environment_variables = {
LITELLM_MODE = "Production";
};
credential_list = [
{
credential_name = "dp_azure_openai_credential";
credential_values = {
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
credential_info = {
description = "Azure OpenAI credentials for DP";
};
}
{
credential_name = "dp_azure_ai_credential";
credential_values = {
api_base = "os.environ/AZURE_API_BASE_AI";
api_key = "os.environ/AZURE_API_KEY_AI";
};
credential_info = {
description = "Azure AI credentials for DP";
};
}
];
model_list = [
{
model_name = "text-embedding-3-large";
litellm_params = {
model = "azure/text-embedding-3-large";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "embedding";
};
}
{
model_name = "text-embedding-3-small";
litellm_params = {
model = "azure/text-embedding-3-small";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "embedding";
};
}
{
model_name = "GPT-3.5 Turbo";
litellm_params = {
model = "azure/gpt-35-turbo";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4o";
litellm_params = {
model = "azure/gpt-4o";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4.1";
litellm_params = {
model = "azure/gpt-4.1";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4.1 Mini";
litellm_params = {
model = "azure/gpt-4.1-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT o3 Mini";
litellm_params = {
model = "azure/o3-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4o Mini";
litellm_params = {
model = "azure/gpt-4o-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "Dall-e 3";
litellm_params = {
model = "azure/dall-e-3";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "image_generation";
};
}
{
model_name = "azure-openai-4o-audio";
litellm_params = {
litellm_credential_name = "dp_azure_openai_credential";
model = "azure/gpt-4o-audio-preview";
};
}
{
model_name = "DeepSeek-R1";
litellm_params = {
litellm_credential_name = "dp_azure_ai_credential";
model = "azure_ai/deepseek-r1";
};
}
];
};
in
{
age.secrets = {
azure-ai.file = ../../secrets/azure-ai.age;
# open-webui-secrets.file = ../../secrets/open-webui-secrets.age;
open-webui.file = ../../secrets/open-webui.age;
};
my = {
services.open-webui = {
enable = true;
port = 4000;
environmentSecretsPath = config.age.secrets.open-webui.path;
environment = {
OAUTH_PROVIDER_NAME = "authentik";
OPENID_PROVIDER_URL = "https://auth.pasetto.me/application/o/openwebui/.well-known/openid-configuration";
OPENID_REDIRECT_URI = "https://ai.pasetto.me/oauth/oidc/callback";
ENABLE_OAUTH_SIGNUP = "true";
ENABLE_LOGIN_FORM = "false";
};
litellm = {
enable = true;
settings = litellmSettings;
};
};
utils = {
commons.enable = true;
commons.gc.enable = true;
@ -26,74 +161,5 @@ in
virtualisation.proxmox.enable = true;
};
services.litellm = {
enable = true;
environmentFile = config.age.secrets.azure-ai.path;
host = "0.0.0.0";
openFirewall = true;
port = litellm-port;
settings = {
model_list = [
{
model_name = "azure-gpt-35-turbo";
litellm_params = {
model = "azure/gpt-35-turbo";
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
}
{
model_name = "azure-gpt-4o";
litellm_params = {
model = "azure/gpt-4o";
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
}
{
model_name = "azure-gpt-4o-mini";
litellm_params = {
model = "azure/gpt-4o-mini";
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
}
{
model_name = "azure-gpt-o3-mini";
litellm_params = {
model = "azure/o3-mini";
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
api_version = "2024-12-01-preview";
};
}
{
model_name = "azure-openai-4o-audio";
litellm_params = {
model = "azure/gpt-4o-audio-preview";
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
}
{
model_name = "dall-e-3";
litellm_params = {
model = "azure/dall-e-3";
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
}
{
model_name = "azure-DeepSeek-R1";
litellm_params = {
model = "azure_ai/DeepSeek-R1";
api_base = "os.environ/AZURE_API_BASE_R1";
api_key = "os.environ/AZURE_API_KEY_R1";
};
}
];
};
};
system.stateVersion = "24.11";
}

136
hosts/open-webui/docker.nix Normal file
View file

@ -0,0 +1,136 @@
# NixOS host module: Open-WebUI deployment variant that runs workloads via
# Docker (see `my.virtualisation.docker.enable` below).
# NOTE(review): the diff header reports 136 new lines but fewer are visible
# here; `litellmSettings` (and the `config`/`pkgs` arguments) are not
# referenced in the visible portion — presumably consumed by container
# definitions elided from this view. Confirm against the full file.
{ config, pkgs, ... }:
let
# LiteLLM proxy configuration attrset. Values of the form
# "os.environ/VAR" are LiteLLM's indirection for reading VAR from the
# process environment at runtime (keeps secrets out of the Nix store).
litellmSettings = {
general_settings = {
proxy_batch_write_at = 60; # Batch write spend updates every 60s
database_connection_pool_limit = 10; # Limit the number of database connections
disable_spend_logs = true; # Turn off writing each transaction to the DB
disable_error_logs = true; # Turn off writing LLM exceptions to DB
allow_requests_on_db_unavailable = true; # Allow requests if DB is unavailable
};
environment_variables = {
LITELLM_MODE = "Production";
};
# Named credential bundles; model entries reference them by
# `litellm_credential_name` instead of repeating api_base/api_key.
credential_list = [
{
credential_name = "dp_azure_openai_credential";
credential_values = {
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
credential_info = {
description = "Azure OpenAI credentials for DP";
};
}
{
credential_name = "dp_azure_ai_credential";
credential_values = {
api_base = "os.environ/AZURE_API_BASE_AI";
api_key = "os.environ/AZURE_API_KEY_AI";
};
credential_info = {
description = "Azure AI credentials for DP";
};
}
];
# Models exposed through the proxy: maps a display name to an Azure
# deployment ("azure/<deployment>") plus a credential bundle.
# `model_info.mode` marks non-chat endpoints (embedding, image).
model_list = [
{
model_name = "text-embedding-3-large";
litellm_params = {
model = "azure/text-embedding-3-large";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "embedding";
};
}
{
model_name = "text-embedding-3-small";
litellm_params = {
model = "azure/text-embedding-3-small";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "embedding";
};
}
{
model_name = "GPT-3.5 Turbo";
litellm_params = {
model = "azure/gpt-35-turbo";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4o";
litellm_params = {
model = "azure/gpt-4o";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT o3 Mini";
litellm_params = {
model = "azure/o3-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4o Mini";
litellm_params = {
model = "azure/gpt-4o-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "Dall-e 3";
litellm_params = {
model = "azure/dall-e-3";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "image_generation";
};
}
{
model_name = "azure-openai-4o-audio";
litellm_params = {
litellm_credential_name = "dp_azure_openai_credential";
model = "azure/gpt-4o-audio-preview";
};
}
{
model_name = "DeepSeek-R1";
litellm_params = {
litellm_credential_name = "dp_azure_ai_credential";
model = "azure_ai/deepseek-r1";
};
}
];
};
in
{
# agenix-encrypted environment/secrets file for Open-WebUI.
age.secrets = {
open-webui.file = ../../secrets/open-webui.age;
};
my = {
virtualisation.docker.enable = true; # this host runs its services in Docker
utils = {
commons.enable = true;
commons.gc.enable = true; # presumably periodic Nix GC — confirm module
lxc-standard.enable = true; # NOTE(review): host looks like a Proxmox LXC guest — confirm
};
virtualisation.proxmox.enable = true;
};
system.stateVersion = "24.11"; # pin; do not bump on upgrades
}

View file

@ -0,0 +1,145 @@
# NixOS host module: LiteLLM proxy run natively via the upstream
# `services.litellm` module (no container).
# NOTE(review): the diff header reports 145 new lines but fewer are visible
# here; some surrounding lines were elided by the renderer. `p`, `pkgs` and
# `lib` appear unused in the visible portion — confirm against the full file.
{
config,
pkgs,
lib,
...
}:
let
p = import ../parameters.nix; # shared host/domain parameters
litellm-port = 12345; # port LiteLLM listens on (also opened in the firewall below)
in
{
# agenix secret providing the AZURE_API_* environment variables that the
# "os.environ/..." references in `settings` resolve against.
age.secrets = {
azure-ai.file = ../../secrets/azure-ai.age;
};
my = {
utils = {
commons.enable = true;
commons.gc.enable = true; # presumably periodic Nix GC — confirm module
lxc-standard.enable = true; # NOTE(review): host looks like a Proxmox LXC guest — confirm
};
virtualisation.proxmox.enable = true;
};
services.litellm = {
enable = true;
# Runtime env file (decrypted by agenix) keeps API keys out of the
# world-readable Nix store.
environmentFile = config.age.secrets.azure-ai.path;
host = "0.0.0.0"; # listen on all interfaces (reverse proxy / LAN access)
openFirewall = true;
port = litellm-port;
settings = {
general_settings = {
proxy_batch_write_at = 60; # Batch write spend updates every 60s
};
environment_variables = {
LITELLM_MODE = "Production";
};
# Named credential bundles referenced by model entries via
# `litellm_credential_name`; values are read from the environment.
credential_list = [
{
credential_name = "dp_azure_openai_credential";
credential_values = {
api_base = "os.environ/AZURE_API_BASE_OPENAI";
api_key = "os.environ/AZURE_API_KEY_OPENAI";
};
credential_info = {
description = "Azure OpenAI credentials for DP";
};
}
{
credential_name = "dp_azure_ai_credential";
credential_values = {
api_base = "os.environ/AZURE_API_BASE_AI";
api_key = "os.environ/AZURE_API_KEY_AI";
};
credential_info = {
description = "Azure AI credentials for DP";
};
}
];
# Azure deployments exposed through the proxy; `model_info.mode`
# marks non-chat endpoints (embedding, image generation).
model_list = [
{
model_name = "text-embedding-3-large";
litellm_params = {
model = "azure/text-embedding-3-large";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "embedding";
};
}
{
model_name = "text-embedding-3-small";
litellm_params = {
model = "azure/text-embedding-3-small";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "embedding";
};
}
{
model_name = "GPT-3.5 Turbo";
litellm_params = {
model = "azure/gpt-35-turbo";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4o";
litellm_params = {
model = "azure/gpt-4o";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT o3 Mini";
litellm_params = {
model = "azure/o3-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "GPT-4o Mini";
litellm_params = {
model = "azure/gpt-4o-mini";
litellm_credential_name = "dp_azure_openai_credential";
};
}
{
model_name = "Dall-e 3";
litellm_params = {
model = "azure/dall-e-3";
litellm_credential_name = "dp_azure_openai_credential";
};
model_info = {
mode = "image_generation";
};
}
{
model_name = "azure-openai-4o-audio";
litellm_params = {
litellm_credential_name = "dp_azure_openai_credential";
model = "azure/gpt-4o-audio-preview";
};
}
{
model_name = "DeepSeek-R1";
litellm_params = {
litellm_credential_name = "dp_azure_ai_credential";
model = "azure_ai/deepseek-r1";
};
}
];
};
};
system.stateVersion = "24.11"; # pin; do not bump on upgrades
}