From cecd8d3c98b48f51aaa1d4c729e55bd319f6799c Mon Sep 17 00:00:00 2001 From: kunnis Date: Mon, 8 Apr 2024 10:44:19 -0500 Subject: [PATCH] Comment explaining a decision (#6531) --- convert.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/convert.py b/convert.py index 244eb75822fd8..a37aeb5e5a652 100755 --- a/convert.py +++ b/convert.py @@ -139,7 +139,8 @@ def type_for_tensor(self, name: str, tensor: LazyTensor) -> DataType: dt = GGML_FILE_TYPE_TO_DATA_TYPE.get(self) if dt is None: raise ValueError(self) - # 1D tensors are always F32. + # Convert all 1D tensors to F32. Most of the codebase that takes in 1D tensors only handles F32 tensors, and most of the output tensors are F32. + # Also, the 1D tensors aren't much of a performance/size issue, so instead of having separate F32 and F16 implementations, just convert everything to F32 for now. return dt if len(tensor.shape) > 1 else DT_F32