Skip to content

Commit

Permalink
Unpack nested list/tuple-wrapped predictions to handle inconsistently serialized prediction strings in details files.
Browse files Browse the repository at this point in the history
  • Loading branch information
JoelNiklaus committed Jan 13, 2025
1 parent ca8331a commit 3a22d93
Showing 1 changed file with 9 additions and 1 deletion.
10 changes: 9 additions & 1 deletion src/lighteval/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,14 @@ def evaluate(self):
except OSError:
pass

def _unpack(self, x):
if isinstance(x, str):
return x
elif isinstance(x, (list, tuple)):
return self._unpack(x[0])
else:
raise ValueError(f"Unknown type {type(x)} of prediction {x}")

def _load_responses_from_details(self):
logger.info("--- LOADING RESPONSES FROM DETAILS ---")
sample_id_to_responses: dict[(SampleUid, MetricCategory), list[ModelResponse]] = collections.defaultdict(list)
Expand All @@ -305,7 +313,7 @@ def _load_responses_from_details(self):
)
num_samples = self.pipeline_parameters.max_samples

predictions = [ast.literal_eval(p) for p in dataset["predictions"][:num_samples]]
predictions = [self._unpack(ast.literal_eval(p)) for p in dataset["predictions"][:num_samples]]
input_tokens = [ast.literal_eval(t) for t in dataset["input_tokens"][:num_samples]]
cont_tokens = [ast.literal_eval(t) for t in dataset["cont_tokens"][:num_samples]]
truncated = [ast.literal_eval(t)[0] for t in dataset["truncated"][:num_samples]]
Expand Down

0 comments on commit 3a22d93

Please sign in to comment.