Skip to content

Commit 48ba294

Browse files
committed
test: extend pragma no-cover to GPU-only branch bodies
Signed-off-by: weimingc <17592131+meenchen@users.noreply.github.com>
1 parent 7f13162 commit 48ba294

1 file changed

Lines changed: 3 additions & 3 deletions

File tree

modelopt/torch/utils/dataset_utils.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -512,11 +512,11 @@ def _get_free_gpu_mem():
512512
* sample_memory_usage_ratio
513513
)
514514
if mem_diff_per_data_batch <= 0: # pragma: no cover - GPU memory probe edge case
515-
print(
515+
print( # pragma: no cover
516516
"Warning: No measurable memory usage found for a single batch. "
517517
"Falling back to batch_size=1."
518518
)
519-
target_data_batch = 1
519+
target_data_batch = 1 # pragma: no cover
520520
else:
521521
target_data_batch = max(int(free_mem_before / mem_diff_per_data_batch), 1)
522522
target_input = sample_input_single_batch.expand(
@@ -534,7 +534,7 @@ def _get_free_gpu_mem():
534534
infer_method(target_input)
535535
break
536536
except torch.cuda.OutOfMemoryError: # pragma: no cover - GPU OOM retry path
537-
target_data_batch = target_data_batch // 2
537+
target_data_batch = target_data_batch // 2 # pragma: no cover
538538

539539
# Regulate the data batch target to be 1, 2, 4, 8, 12, ..., capped at 64
540540
if target_data_batch < 2:

0 commit comments

Comments (0)