From d7b2b6afdedadd8461ae880982f32a085db4967b Mon Sep 17 00:00:00 2001
From: matatonic
Date: Wed, 19 Jun 2024 18:53:42 -0400
Subject: [PATCH] 0.12.1 typo in sample.env

---
 vision.sample.env | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/vision.sample.env b/vision.sample.env
index 5cfded1..bb2c8e4 100644
--- a/vision.sample.env
+++ b/vision.sample.env
@@ -77,8 +77,7 @@ HF_HUB_ENABLE_HF_TRANSFER=1
 #CLI_COMMAND="python vision.py -m internlm/internlm-xcomposer2-vl-7b --use-flash-attn --device-map cuda:0" # test pass✅, time: 25.6s, mem: 20.1GB, 12/12 tests passed.
 #CLI_COMMAND="python vision.py -m internlm/internlm-xcomposer2-vl-7b-4bit --use-flash-attn" # test pass✅, time: 15.4s, mem: 10.8GB, 12/12 tests passed.
 #CLI_COMMAND="python vision.py -m llava-hf/llava-1.5-13b-hf --use-flash-attn --device-map cuda:0 --load-in-4bit" # test pass✅, time: 13.6s, mem: 9.5GB, 12/12 tests passed.
-#CLI_COMMAND="python vision.py -m llava-hf/llava-1.5-13b-hf --use-flash-attn --device-map cuda:0" # test pass✅, time: 9.8s, mem: 26.7GB, 12/12 test
-s passed.
+#CLI_COMMAND="python vision.py -m llava-hf/llava-1.5-13b-hf --use-flash-attn --device-map cuda:0" # test pass✅, time: 9.8s, mem: 26.7GB, 12/12 tests passed.
 #CLI_COMMAND="python vision.py -m llava-hf/llava-1.5-7b-hf --use-flash-attn --device-map cuda:0 --load-in-4bit" # test pass✅, time: 9.5s, mem: 5.7GB, 12/12 tests passed.
 #CLI_COMMAND="python vision.py -m llava-hf/llava-1.5-7b-hf --use-flash-attn --device-map cuda:0" # test pass✅, time: 8.2s, mem: 14.4GB, 12/12 tests passed.
 #CLI_COMMAND="python vision.py -m llava-hf/llava-v1.6-34b-hf --use-flash-attn --load-in-4bit" # test pass✅, time: 63.8s, mem: 23.3GB, 12/12 tests passed.