LLM_Inferenz_Server_1/07_start_openwebui.sh
herzogflorian f4fdaab732 Add Open WebUI integration and enhance Streamlit app
- Add Open WebUI scripts (06-09) for server-hosted ChatGPT-like interface
  connected to the vLLM backend on port 7081
- Add context window management to chat (auto-trim, token counter, progress bar)
- Add terminal output panel to file editor for running Python/LaTeX files
- Update README with Open WebUI setup, architecture diagram, and troubleshooting
- Update STUDENT_GUIDE with step-by-step Open WebUI login instructions

Made-with: Cursor
2026-03-02 18:48:51 +01:00

71 lines
2.4 KiB
Bash
Executable File

#!/usr/bin/env bash
# ------------------------------------------------------------------
# 07_start_openwebui.sh
# Starts Open WebUI connected to the vLLM inference server.
#
# Open WebUI provides a ChatGPT-like interface with:
#   - User accounts & chat history (persisted in openwebui-data/)
#   - Model selector (auto-discovers models from vLLM)
#   - Streaming responses, markdown rendering, code highlighting
#
# The first user to sign up becomes the admin.
#
# Usage:
#   bash 07_start_openwebui.sh                                   # defaults
#   PORT=7082 bash 07_start_openwebui.sh                         # custom port
#   VLLM_BASE_URL=http://localhost:7080/v1 bash 07_start_openwebui.sh
#
# Environment variables:
#   PORT          — HTTP port for Open WebUI (default: 7081)
#   VLLM_BASE_URL — vLLM OpenAI-compatible URL (default: http://localhost:7080/v1)
#   VLLM_API_KEY  — API key for vLLM (default: EMPTY)
#   DATA_DIR      — Persistent storage path (default: ./openwebui-data)
# ------------------------------------------------------------------
set -euo pipefail

# Resolve paths relative to this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
readonly SCRIPT_DIR
readonly SIF_FILE="${SCRIPT_DIR}/open-webui.sif"

# User-overridable settings (documented in the header above).
PORT="${PORT:-7081}"
VLLM_BASE_URL="${VLLM_BASE_URL:-http://localhost:7080/v1}"
VLLM_API_KEY="${VLLM_API_KEY:-EMPTY}"
DATA_DIR="${DATA_DIR:-${SCRIPT_DIR}/openwebui-data}"

# Fail fast if apptainer is missing — otherwise the script prints the
# full banner and then dies with an opaque "command not found".
if ! command -v apptainer >/dev/null 2>&1; then
  echo "ERROR: 'apptainer' not found on PATH." >&2
  exit 1
fi

# The image is produced by 06_setup_openwebui.sh; diagnostics go to stderr.
if [[ ! -f "$SIF_FILE" ]]; then
  echo "ERROR: Container image not found at ${SIF_FILE}" >&2
  echo "       Run 06_setup_openwebui.sh first." >&2
  exit 1
fi

# Chat history, user accounts etc. persist here across restarts.
mkdir -p "$DATA_DIR"

echo "=== Starting Open WebUI ==="
echo "  Port:           ${PORT}"
echo "  vLLM backend:   ${VLLM_BASE_URL}"
echo "  Data directory: ${DATA_DIR}"
echo ""
echo "  Access at: http://$(hostname -f 2>/dev/null || hostname):${PORT}"
echo "  First user to sign up becomes admin."
echo ""
echo "  Press Ctrl+C to stop."
echo "==========================================="
echo ""

# 'exec' replaces this shell with the container process so Ctrl+C /
# SIGTERM reach Open WebUI directly (no orphaned bash parent).
# --writable-tmpfs: Open WebUI writes caches outside the bind mount.
# OFFLINE_MODE / HF_HUB_OFFLINE: no outbound downloads on the cluster.
exec apptainer exec \
  --writable-tmpfs \
  --pwd /app/backend \
  --bind "${DATA_DIR}:/app/backend/data" \
  --env PORT="${PORT}" \
  --env ENABLE_OPENAI_API="True" \
  --env OPENAI_API_BASE_URLS="${VLLM_BASE_URL}" \
  --env OPENAI_API_KEYS="${VLLM_API_KEY}" \
  --env ENABLE_OLLAMA_API="False" \
  --env ENABLE_SIGNUP="True" \
  --env DEFAULT_USER_ROLE="user" \
  --env WEBUI_NAME="Qwen3.5 LLM Server" \
  --env OFFLINE_MODE="True" \
  --env ENABLE_VERSION_UPDATE_CHECK="False" \
  --env HF_HUB_OFFLINE="1" \
  "$SIF_FILE" \
  bash start.sh