feat(docker image): add support for custom inference servers (#543)

- Add OPENAI_API_ENDPOINT configuration
- Add LLM_MODEL_NAME configuration
- Update documentation for custom server setup
- Add error handling for endpoint configuration
This commit is contained in:
Calum Siemer
2025-02-06 13:16:15 -05:00
committed by GitHub
parent 7b6271962a
commit 1878083056
6 changed files with 115 additions and 29 deletions

View File

@@ -10,7 +10,11 @@ server {
location /config.js {
default_type application/javascript;
-        return 200 "window.env = { OPENAI_API_KEY: \"$OPENAI_API_KEY\" };";
+        return 200 "window.env = {
+            OPENAI_API_KEY: \"$OPENAI_API_KEY\",
+            OPENAI_API_ENDPOINT: \"$OPENAI_API_ENDPOINT\",
+            LLM_MODEL_NAME: \"$LLM_MODEL_NAME\"
+        };";
}
error_page 500 502 503 504 /50x.html;