diff --git a/python/llm/dev/benchmark/all-in-one/run.py b/python/llm/dev/benchmark/all-in-one/run.py
index e74214e54be..c4af2a04b9b 100644
--- a/python/llm/dev/benchmark/all-in-one/run.py
+++ b/python/llm/dev/benchmark/all-in-one/run.py
@@ -1823,12 +1823,12 @@ def run_pipeline_parallel_gpu(repo_id,
     import pandas as pd
     for api in conf.test_api:
         global csv_name
+        csv_name = f'{current_dir}/{api}-results-{today}.csv'
         if not OmegaConf.is_list(conf["batch_size"]):
             batch_list = [conf["batch_size"]]
         else:
             batch_list = conf["batch_size"]
         for batch_size in batch_list:
-            csv_name = f'{current_dir}/{api}-results-{today}.csv'
             for model in conf.repo_id:
                 in_out_pairs = conf['in_out_pairs'].copy()
                 if excludes: