From 94fa5f1d76a4e13f16900f24438ebc912ea3ef6d Mon Sep 17 00:00:00 2001
From: menamerai
Date: Sat, 10 Feb 2024 00:11:53 -0500
Subject: [PATCH] shorten docstring

---
 src/delphi/eval/token_map.py | 11 +----------
 1 file changed, 1 insertion(+), 10 deletions(-)

diff --git a/src/delphi/eval/token_map.py b/src/delphi/eval/token_map.py
index 2bc9e9b7..3c021724 100644
--- a/src/delphi/eval/token_map.py
+++ b/src/delphi/eval/token_map.py
@@ -9,17 +9,8 @@ def token_map(
     tokenized_dataset: Dataset,
     output_path: str | None = None,
 ) -> dict[int, list[tuple[int, int]]]:
-    """Return a mapping of tokens to their (prompt_idx, token_idx) locations in the tokenized_dataset.
+    """Return a mapping of tokens to their (prompt_idx, token_idx) locations in the tokenized_dataset."""
 
-    Args:
-        tokenized_dataset (Dataset): A tokenized dataset.
-        save_output (bool, optional): Whether to save the output to a file. Defaults to True.
-        output_path (str, optional): The output file path. Defaults to "/data/token_map.pkl".
-
-    Returns:
-        dict[int, list[tuple[int, int]]]: A mapping of tokens to their (prompt_idx, token_idx)
-            locations in the tokenized_dataset.
-    """
     mapping = {}
     for prompt_idx, prompt in enumerate(tokenized_dataset):
         prompt = cast(dict, prompt)
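
Note for reviewers: the hunk above cuts off before the body of the loop, so here is a minimal sketch of what the full function plausibly does, per the retained one-line docstring. The "tokens" column name, the setdefault-based accumulation, and the omitted output_path handling are illustrative assumptions, not taken from this patch:

    from typing import cast

    from datasets import Dataset

    def token_map(
        tokenized_dataset: Dataset,
        output_path: str | None = None,
    ) -> dict[int, list[tuple[int, int]]]:
        """Return a mapping of tokens to their (prompt_idx, token_idx) locations in the tokenized_dataset."""
        mapping: dict[int, list[tuple[int, int]]] = {}
        for prompt_idx, prompt in enumerate(tokenized_dataset):
            prompt = cast(dict, prompt)
            # Record every (prompt_idx, token_idx) position at which each token id occurs.
            # "tokens" is an assumed column name; the real dataset may use another key.
            for token_idx, token in enumerate(prompt["tokens"]):
                mapping.setdefault(token, []).append((prompt_idx, token_idx))
        # output_path handling (e.g. pickling the mapping) is omitted in this sketch.
        return mapping

For example, Dataset.from_dict({"tokens": [[1, 2, 1], [2, 3]]}) would map token 1 to [(0, 0), (0, 2)], token 2 to [(0, 1), (1, 0)], and token 3 to [(1, 1)].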