#!/usr/bin/env bash

# This script builds the CPU docker image and runs the offline inference inside the container.
# It serves as a sanity check for compilation and basic model usage.
set -ex

# Try building the docker image
docker build -t cpu-test -f Dockerfile.cpu .
# Also build an AVX2-only variant (AVX512 disabled) to cover the fallback code path.
docker build --build-arg VLLM_CPU_DISABLE_AVX512="true" -t cpu-test-avx2 -f Dockerfile.cpu .
# Setup cleanup: remove both containers on any exit path (success or failure),
# and also up front in case a previous run left stale containers behind.
# '|| true' keeps cleanup from failing the script when a container does not exist.
remove_docker_container() { docker rm -f cpu-test cpu-test-avx2 || true; }
trap remove_docker_container EXIT
remove_docker_container
# Run the images in detached mode, sharing the host HF cache to avoid re-downloading models.
# -e HF_TOKEN forwards the host token for gated model downloads.
# NOTE(review): cpuset 48-95 / mem node 1 presumably pins to the second NUMA node of the
# CI host — confirm against the actual host topology.
docker run -itd -v ~/.cache/huggingface:/root/.cache/huggingface --cpuset-cpus=48-95 \
  --cpuset-mems=1 --network host -e HF_TOKEN --env VLLM_CPU_KVCACHE_SPACE=4 \
  --name cpu-test cpu-test
docker run -itd -v ~/.cache/huggingface:/root/.cache/huggingface --cpuset-cpus=48-95 \
  --cpuset-mems=1 --network host -e HF_TOKEN --env VLLM_CPU_KVCACHE_SPACE=4 \
  --name cpu-test-avx2 cpu-test-avx2
# Run offline inference in both containers as a basic end-to-end smoke test.
docker exec cpu-test bash -c "python3 examples/offline_inference.py"
docker exec cpu-test-avx2 bash -c "python3 examples/offline_inference.py"
# Run basic model test (AVX512 image only). Installs test-only deps inside the
# container, then runs the models suite, skipping vision-language tests and a
# few model files that are not supported on CPU.
docker exec cpu-test bash -c "cd tests;
  pip install pytest Pillow protobuf
  cd ../
  pytest -v -s tests/models -m \"not vlm\" --ignore=tests/models/test_embedding.py --ignore=tests/models/test_registry.py --ignore=tests/models/test_jamba.py" # Mamba on CPU is not supported