# Apptainer definition: OpenAI-compatible vLLM inference server for
# Qwen3.5-35B-A3B, built on the upstream vLLM nightly Docker image.
Bootstrap: docker
From: vllm/vllm-openai:nightly

%labels
    Author herzogfloria
    Description vLLM nightly inference server for Qwen3.5-35B-A3B
    Version 3.0

%environment
    # Keep the Hugging Face model/tokenizer cache in a writable tmpfs path,
    # since the container image itself is read-only at runtime.
    export HF_HOME=/tmp/hf_cache
    export VLLM_USAGE_SOURCE=production

%post
    # Abort the build on the first failing command; without this, a failed
    # pip step would be silently ignored and the build would "succeed".
    set -e

    # git is required below to install transformers directly from GitHub.
    apt-get update
    apt-get install -y --no-install-recommends git
    rm -rf /var/lib/apt/lists/*

    # Bleeding-edge transformers from the main branch — presumably needed for
    # Qwen3.5 model-class support not yet in a tagged release (TODO confirm
    # the nightly vLLM base image does not already satisfy this).
    pip install --no-cache-dir "transformers @ git+https://github.com/huggingface/transformers.git@main"

    # Quote the requirement so the shell cannot glob-expand '[cli]' (SC2102).
    pip install --no-cache-dir "huggingface_hub[cli]"

%runscript
    # Forward all container arguments (model name, --port, etc.) verbatim to
    # the vLLM OpenAI-compatible API server; exec replaces the shell as PID 1.
    exec python3 -m vllm.entrypoints.openai.api_server "$@"

%help
    Apptainer container for serving Qwen3.5-35B-A3B via vLLM (nightly).