Skip to content

Commit 60b97ae

Browse files
committed
fix code quality check
Signed-off-by: weimingc <17592131+meenchen@users.noreply.github.com>
1 parent 356f88d commit 60b97ae

2 files changed

Lines changed: 5 additions & 8 deletions

File tree

modelopt/torch/utils/dataset_utils.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
import json
2020
import os
2121
from collections.abc import Callable, Iterator
22-
from contextlib import contextmanager
22+
from contextlib import contextmanager, suppress
2323
from pathlib import Path
2424
from typing import TYPE_CHECKING, Any
2525
from warnings import warn
@@ -464,10 +464,8 @@ def _disable_use_cache(model: torch.nn.Module) -> Iterator[None]:
464464
if had_attr:
465465
config.use_cache = prev
466466
else:
467-
try:
467+
with suppress(AttributeError):
468468
delattr(config, "use_cache")
469-
except AttributeError:
470-
pass
471469

472470

473471
def get_max_batch_size(

tests/unit/torch/utils/test_dataset_utils.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -195,10 +195,9 @@ def test_disable_use_cache_restores_on_exception():
195195
model.config = _Config()
196196
model.config.use_cache = True
197197

198-
with pytest.raises(RuntimeError, match="boom"):
199-
with _disable_use_cache(model):
200-
assert model.config.use_cache is False
201-
raise RuntimeError("boom")
198+
with pytest.raises(RuntimeError, match="boom"), _disable_use_cache(model):
199+
assert model.config.use_cache is False
200+
raise RuntimeError("boom")
202201

203202
assert model.config.use_cache is True
204203

0 commit comments

Comments (0)