Skip to content

Commit 0cbacec

Browse files
committed
Revert "pass EOS token id to validate"
Signed-off-by: Sukriti-Sharma4 <sukriti.sharma4@ibm.com>
1 parent 9dc90ac commit 0cbacec

5 files changed

Lines changed: 0 additions & 11 deletions

File tree

aiu_fms_testing_utils/scripts/drive_paged_programs.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -682,7 +682,6 @@ def __metric_calculator(r: torch.Tensor, t: torch.Tensor):
682682
max_new_tokens,
683683
LogitsExtractorHook(),
684684
attn_algorithm="math",
685-
eos_token_id=tokenizer.eos_token_id,
686685
**extra_kwargs,
687686
)
688687
# save the cpu validation info for later consumption
@@ -707,7 +706,6 @@ def __metric_calculator(r: torch.Tensor, t: torch.Tensor):
707706
input_ids,
708707
max_new_tokens,
709708
GoldenTokenHook(cpu_validation_info.get_info("tokens")),
710-
eos_token_id=tokenizer.eos_token_id,
711709
last_n_tokens=64,
712710
timing=TIMING,
713711
prefill_chunk_size=args.prefill_chunk_size,
@@ -753,7 +751,6 @@ def __metric_calculator(r: torch.Tensor, t: torch.Tensor):
753751
input_ids,
754752
max_new_tokens,
755753
None,
756-
eos_token_id=tokenizer.eos_token_id,
757754
last_n_tokens=64,
758755
timing=TIMING,
759756
prefill_chunk_size=args.prefill_chunk_size,
@@ -797,7 +794,6 @@ def __metric_calculator(r: torch.Tensor, t: torch.Tensor):
797794
input_ids,
798795
max_new_tokens,
799796
None,
800-
eos_token_id=tokenizer.eos_token_id,
801797
last_n_tokens=64,
802798
timing=TIMING,
803799
prefill_chunk_size=args.prefill_chunk_size,

aiu_fms_testing_utils/scripts/generate_metrics.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -245,7 +245,6 @@ def write_csv(metrics, path, metric_name):
245245
args.max_new_tokens,
246246
LogitsExtractorHook(),
247247
attn_algorithm="math",
248-
eos_token_id=tokenizer.eos_token_id,
249248
**padding_kwargs,
250249
)
251250
cpu_static_tokens = cpu_validation_info.get_info("tokens")
@@ -260,7 +259,6 @@ def write_csv(metrics, path, metric_name):
260259
ids.to("cuda"),
261260
args.max_new_tokens,
262261
None,
263-
eos_token_id=tokenizer.eos_token_id,
264262
last_n_tokens=1,
265263
**{k: v.to("cuda") for k, v in padding_kwargs.items()},
266264
)
@@ -327,7 +325,6 @@ def write_csv(metrics, path, metric_name):
327325
args.max_new_tokens,
328326
LogitsExtractorHook(),
329327
attn_algorithm="math",
330-
eos_token_id=tokenizer.eos_token_id,
331328
**padding_kwargs,
332329
)
333330

@@ -337,7 +334,6 @@ def write_csv(metrics, path, metric_name):
337334
ids.to("cuda"),
338335
args.max_new_tokens,
339336
GoldenTokenHook(cpu_validation_info.get_info("tokens"), "cuda"),
340-
eos_token_id=tokenizer.eos_token_id,
341337
last_n_tokens=1,
342338
**{k: v.to("cuda") for k, v in padding_kwargs.items()},
343339
)

aiu_fms_testing_utils/scripts/save_cpu_data.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,6 @@ def process_row(row):
9797
max_new_tokens,
9898
LogitsExtractorHook(),
9999
attn_algorithm="math",
100-
eos_token_id=tokenizer.eos_token_id,
101100
)
102101
return {"id": id, "input_ids": input_ids, "validation": cpu_validation_info}
103102

aiu_fms_testing_utils/scripts/validation.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -690,7 +690,6 @@ def print_result(result, result_idx: int = 0, file_prefix: str = ""):
690690
ids.to(validation_device),
691691
args.max_new_tokens,
692692
LogitsExtractorHook(),
693-
eos_token_id=None if args.no_early_termination else tokenizer.eos_token_id,
694693
attn_algorithm="math",
695694
**padding_kwargs,
696695
)

tests/models/test_decoders.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -612,7 +612,6 @@ def _get_device_validation_information(
612612
max_new_tokens,
613613
post_iteration_hook,
614614
timing=TIMING,
615-
eos_token_id=tokenizer.eos_token_id,
616615
**extra_kwargs,
617616
**device_dependent_kwargs,
618617
)

0 commit comments

Comments (0)