Add environment configuration and Docker Compose setup for Open WebUI and LiteLLM
This commit is contained in:
parent
22697d1913
commit
56ea386f83
3 changed files with 161 additions and 11 deletions
78
containers/open-webui/litellm-config.yaml
Normal file
78
containers/open-webui/litellm-config.yaml
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
---
# LiteLLM proxy configuration: Azure OpenAI / Azure AI credentials and model routing.
environment_variables: {}

router_settings: {}

general_settings:
  proxy_batch_write_at: 60 # Batch write spend updates every 60s
  database_connection_pool_limit: 10 # limit the number of database connections to = MAX Number of DB Connections/Number of instances of litellm proxy (Around 10-20 is good number)

  # OPTIONAL Best Practices
  disable_spend_logs: true # turn off writing each transaction to the db. We recommend doing this if you don't need to see Usage on the LiteLLM UI and are tracking metrics via Prometheus
  disable_error_logs: true # turn off writing LLM Exceptions to DB
  allow_requests_on_db_unavailable: true # Only USE when running LiteLLM on your VPC. Allow requests to still be processed even if the DB is unavailable. We recommend doing this if you're running LiteLLM on VPC that cannot be accessed from the public internet.
  store_model_in_db: true

litellm_settings:
  request_timeout: 600 # raise Timeout error if call takes longer than 600 seconds. Default value is 6000 seconds if not set
  set_verbose: false # Switch off Debug Logging, ensure your logs do not have any debugging on
  json_logs: true # Get debug logs in json format

# Named credential sets referenced by model_list entries via litellm_credential_name.
credential_list:
  - credential_name: azure_openai_credential
    credential_values:
      api_base: os.environ/AZURE_API_BASE_OPENAI
      api_key: os.environ/AZURE_API_KEY_OPENAI
    credential_info:
      description: Azure OpenAI credentials
  - credential_name: azure_ai_credential
    credential_values:
      api_base: os.environ/AZURE_API_BASE_AI
      api_key: os.environ/AZURE_API_KEY_AI
    credential_info:
      description: Azure AI credentials

model_list:
  - model_name: text-embedding-3-large
    litellm_params:
      model: azure/text-embedding-3-large
      litellm_credential_name: azure_openai_credential
  - model_name: text-embedding-3-small
    litellm_params:
      model: azure/text-embedding-3-small
      litellm_credential_name: azure_openai_credential
  - model_name: GPT-4.1
    litellm_params:
      model: azure/gpt-4.1
      litellm_credential_name: azure_openai_credential
  - model_name: GPT-4.1 Mini
    litellm_params:
      model: azure/gpt-4.1-mini
      litellm_credential_name: azure_openai_credential
  - model_name: GPT-3.5 Turbo
    litellm_params:
      model: azure/gpt-3.5-turbo
      litellm_credential_name: azure_openai_credential
  - model_name: GPT-4o
    litellm_params:
      model: azure/gpt-4o
      litellm_credential_name: azure_openai_credential
  - model_name: GPT-4o Mini
    litellm_params:
      model: azure/gpt-4o-mini
      litellm_credential_name: azure_openai_credential
  - model_name: o3 Mini
    litellm_params:
      model: azure/o3-mini
      # Quoted: date-prefixed plain scalars can be mistyped as timestamps by YAML 1.1 parsers.
      api_version: "2024-12-01-preview"
      litellm_credential_name: azure_openai_credential
  - model_name: GPT-4o Audio
    litellm_params:
      model: azure/azure-openai-4o-audio
      litellm_credential_name: azure_openai_credential
  - model_name: Dall-e 3
    litellm_params:
      model: azure/dall-e-3
      litellm_credential_name: azure_openai_credential
  - model_name: DeepSeek-R1
    litellm_params:
      model: azure_ai/DeepSeek-R1
      litellm_credential_name: azure_ai_credential
|
||||
Loading…
Add table
Add a link
Reference in a new issue