Test with Open-WebUI with full nix, podman and docker

In the end, a Docker Compose setup is easier (maybe wait for 25.04)
This commit is contained in:
pazpi 2025-05-24 18:10:47 +02:00
parent aec8b7a986
commit 22697d1913
11 changed files with 639 additions and 100 deletions

View file

@ -11,5 +11,6 @@
./postgres.nix
./searx.nix
./vaultwarden.nix
./open-webui.nix
];
}

View file

@ -0,0 +1,203 @@
{
  lib,
  config,
  pkgs,
  ...
}:
let
  cfg = config.my.services.open-webui;
  # Host port of the litellm Postgres container. Referenced by the container
  # port mapping, DATABASE_URL and the firewall rule so they cannot drift.
  dbPort = 5432;
  settingsFormat = pkgs.formats.yaml { };
  inherit (lib) types;
in
{
  options.my.services.open-webui = {
    # mkEnableOption already prefixes "Whether to enable", so the text must
    # not itself start with "Enable".
    enable = lib.mkEnableOption "Open WebUI, alternative OpenAI frontend module";

    port = lib.mkOption {
      type = types.port;
      default = 8080;
      description = ''
        The port on which the Open WebUI service will listen.
      '';
    };

    environmentSecretsPath = lib.mkOption {
      # null means "no secrets file". The previous default of "" failed the
      # `path` type check as soon as the option was evaluated while unset.
      type = types.nullOr types.path;
      default = null;
      description = ''
        Path to the environment file containing secrets, or null for none.
      '';
    };

    environment = lib.mkOption {
      type = types.attrsOf types.str;
      default = { };
      description = "Environment variables to set for Open WebUI";
    };

    litellm = {
      enable = lib.mkEnableOption "LiteLLM OpenAI proxy module";

      port = lib.mkOption {
        type = types.port;
        default = 12345;
        description = ''
          The port on which the LiteLLM service will listen.
        '';
      };

      settings = lib.mkOption {
        type = types.submodule {
          # Free-form: any extra keys are passed through to the YAML config.
          freeformType = settingsFormat.type;
          options = {
            model_list = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                List of supported models on the server, with model-specific configs.
              '';
              default = [ ];
            };
            router_settings = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                LiteLLM Router settings
              '';
              default = { };
            };
            litellm_settings = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                LiteLLM Module settings
              '';
              default = { };
            };
            general_settings = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                LiteLLM Server settings
              '';
              default = { };
            };
            environment_variables = lib.mkOption {
              type = settingsFormat.type;
              description = ''
                Environment variables to pass to the LiteLLM server.
              '';
              default = { };
            };
          };
        };
        default = { };
        description = ''
          Configuration for LiteLLM.
          See <https://docs.litellm.ai/docs/proxy/configs> for more.
        '';
      };
    };
  };

  config = lib.mkMerge [
    (lib.mkIf cfg.enable {
      # Enable Podman as the container runtime.
      virtualisation.podman = {
        enable = true;
        autoPrune = {
          enable = true;
          flags = [ "--all" ];
        };
      };

      virtualisation.oci-containers = {
        backend = "podman";
        containers = {
          open-webui = {
            image = "ghcr.io/open-webui/open-webui:main";
            ports = [
              "${toString cfg.port}:8080"
            ];
            # Only mount an env file when one is actually configured.
            environmentFiles = lib.optional (cfg.environmentSecretsPath != null) cfg.environmentSecretsPath;
            environment =
              cfg.environment
              // {
                ENABLE_OPENAI_API = "true";
                ENABLE_OLLAMA_API = "false";
                ENABLE_WEB_SEARCH = "true";
                WEB_SEARCH_ENGINE = "searxng";
                SEARXNG_QUERY_URL = "https://search.pasetto.me/search?q=<query>";
              }
              // (lib.optionalAttrs cfg.litellm.enable {
                # Reach the LiteLLM proxy published on the host.
                OPENAI_API_BASE_URL = "http://host.containers.internal:${toString cfg.litellm.port}";
              });
            volumes = [ "open-webui:/app/backend/data" ];
            labels = {
              # Let `podman auto-update` pull newer images from the registry.
              "io.containers.autoupdate" = "registry";
            };
          };
        };
      };

      networking.firewall.allowedTCPPorts = [ cfg.port ];
    })

    (lib.mkIf cfg.litellm.enable {
      virtualisation.oci-containers.containers = {
        litellm =
          let
            # Render the declarative settings to the YAML file LiteLLM expects.
            configFile = settingsFormat.generate "config.yaml" cfg.litellm.settings;
          in
          {
            image = "ghcr.io/berriai/litellm:main-stable";
            volumes = [ "${configFile}:/app/config.yaml" ];
            cmd = [ "--config=/app/config.yaml" ];
            ports = [ "${toString cfg.litellm.port}:4000" ];
            environmentFiles = lib.optional (cfg.environmentSecretsPath != null) cfg.environmentSecretsPath;
            environment = {
              # dbPort keeps the URL in sync with the container port mapping
              # and the firewall rule below.
              DATABASE_URL = "postgresql://llmproxy:llmproxypwd@host.containers.internal:${toString dbPort}/litellm";
              STORE_MODEL_IN_DB = "True";
              USE_PRISMA_MIGRATE = "True";
            };
            labels = {
              "io.containers.autoupdate" = "registry";
            };
          };

        litellm_db = {
          image = "docker.io/library/postgres:16";
          hostname = "db";
          ports = [ "${toString dbPort}:5432" ];
          environment = {
            POSTGRES_DB = "litellm";
            POSTGRES_USER = "llmproxy";
            # FIXME(review): credentials are hard-coded in the Nix store;
            # consider moving them into the secrets environment file.
            POSTGRES_PASSWORD = "llmproxypwd";
          };
          volumes = [ "litellm_postgres_data:/var/lib/postgresql/data" ];
          labels = {
            "io.containers.autoupdate" = "registry";
          };
        };
      };

      networking.firewall.allowedTCPPorts = [
        cfg.litellm.port
        dbPort
      ];

      # Make sure the database container is up before the proxy starts.
      systemd.services."podman-litellm".after = [ "podman-litellm_db.service" ];
      systemd.services."podman-litellm".requires = [ "podman-litellm_db.service" ];
    })
  ];
}