
Commit 193f878
[BC breaking] Remove check_sparse_nnz argument of gradcheck (pytorch#115658)

As in the title, per the deprecation plan.
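For anyone hitting this break, the migration is mechanical: whatever value was passed as `check_sparse_nnz` should now be passed as `masked`. A minimal sketch (the function and input mirror the deleted test; names are illustrative, not part of this PR):

    import torch

    # A function taking a sparse input; masked_grad=True matches gradcheck's
    # masked semantics (gradients are checked at nnz positions only).
    def fn(x):
        return x.to_dense(masked_grad=True)

    x = torch.tensor([[0., 2.], [3., 4.]], dtype=torch.float64).to_sparse().requires_grad_()

    # Before (deprecated since 2.1, removed by this commit):
    #   torch.autograd.gradcheck(fn, (x,), check_sparse_nnz=True)
    # After:
    torch.autograd.gradcheck(fn, (x,), masked=True)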

Pull Request resolved: pytorch#115658
Approved by: https://github.com/cpuhrsch, https://github.com/soulitzer
pearu authored and pytorchmergebot committed Dec 13, 2023
1 parent 310f6ab commit 193f878
Showing 2 changed files with 0 additions and 76 deletions.
52 changes: 0 additions & 52 deletions test/test_sparse.py
@@ -162,58 +162,6 @@ def f5():
        # Check warn-once:
        self.assertEqual(len(cm.warnings), 1)

    @parametrize('fast_mode', (True, False))
    def test_gradcheck_check_sparse_nnz(self, fast_mode):
        """Tests for deprecated check_sparse_nnz keyword argument of gradcheck.
        Deprecation steps:
        2.1: Specification of check_sparse_nnz triggers a warning.
        2.2: Specification of check_sparse_nnz triggers an
             exception. Remove all check_sparse_nnz usages from
             gradcheck and delete this test.
        """
        def fn(x, masked_grad):
            return x.to_dense(masked_grad=masked_grad)

        def test(x, masked_grad, masked, check_sparse_nnz):
            x = x.detach().clone().requires_grad_()
            torch.autograd.gradcheck(fn, (x, masked_grad), masked=masked, check_sparse_nnz=check_sparse_nnz, fast_mode=fast_mode)

        x = torch.tensor([[0, 2], [3, 4]], dtype=torch.float64).to_sparse()

        for masked_grad, masked, check_sparse_nnz in itertools.product(*[(True, False, None)] * 3):
            effective_masked_grad = True if masked_grad is None else masked_grad
            effective_check_sparse_nnz = False if check_sparse_nnz is None else check_sparse_nnz
            # For BC, the effective masked depends on the value of specified check_sparse_nnz:
            effective_masked = (check_sparse_nnz if check_sparse_nnz is not None else False) if masked is None else masked

            warn_using_check_sparse_nnz = self.assertWarns(
                UserWarning,
                msg=('Backwards compatibility: check_sparse_nnz is deprecated, it will be removed in a future version of PyTorch.'
                     f' Use masked={effective_check_sparse_nnz} instead.'))
            raise_on_non_equal_masked_and_check_sparse_nnz = self.assertRaisesRegex(
                ValueError,
                f"Expected specified check_sparse_nnz [(]={effective_check_sparse_nnz}[)]"
                f" to be equal to masked [(]={effective_masked}[)]")
            raise_jacobian_mismatch = self.assertRaisesRegex(RuntimeError, "Jacobian mismatch for output 0 with respect to input 0")

            def run_test():
                if effective_masked_grad != effective_masked and not fast_mode:
                    with raise_jacobian_mismatch:
                        test(x, masked_grad, masked, check_sparse_nnz)
                else:
                    test(x, masked_grad, masked, check_sparse_nnz)

            if masked != check_sparse_nnz and None not in {masked, check_sparse_nnz}:
                # the specified masked and check_sparse_nnz must match
                with warn_using_check_sparse_nnz:
                    with raise_on_non_equal_masked_and_check_sparse_nnz:
                        test(x, masked_grad, masked, check_sparse_nnz)
            elif check_sparse_nnz is not None:
                with warn_using_check_sparse_nnz:
                    run_test()
            else:
                self.assertNotWarn(run_test)

class TestSparseBase(TestCase):
    def run(self, result=None):
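For reference, the backward-compatibility resolution that this test exercised (and that the gradcheck.py hunk below deletes) can be summarized in a small standalone sketch — illustrative only, not a PyTorch API:

    def resolve_masked(masked, check_sparse_nnz):
        # Mirrors the removed BC logic in torch.autograd.gradcheck:
        # the deprecated check_sparse_nnz acted as an alias for masked.
        if check_sparse_nnz is None:
            return False if masked is None else masked
        # Specifying check_sparse_nnz at all triggered a deprecation warning.
        if masked is None:
            return check_sparse_nnz
        if masked != check_sparse_nnz:
            raise ValueError(
                f"Expected specified check_sparse_nnz (={check_sparse_nnz}) "
                f"to be equal to masked (={masked})."
            )
        return masked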
24 changes: 0 additions & 24 deletions torch/autograd/gradcheck.py
@@ -1951,7 +1951,6 @@ def gradcheck(
    atol: float = 1e-5,
    rtol: float = 1e-3,
    raise_exception: bool = True,
    check_sparse_nnz: Optional[bool] = None,
    nondet_tol: float = 0.0,
    check_undefined_grad: bool = True,
    check_grad_dtypes: bool = False,
@@ -2006,12 +2005,6 @@ def gradcheck(
        raise_exception (bool, optional): indicating whether to raise an exception if
            the check fails. The exception gives more information about the
            exact nature of the failure. This is helpful when debugging gradchecks.
        check_sparse_nnz (bool, optional): if ``True``, gradcheck allows
            for SparseTensor input, and for any SparseTensor inputs,
            gradcheck will perform its check at ``nnz`` positions only.
            The ``check_sparse_nnz`` argument is deprecated, use the
            ``masked`` argument instead. If ``check_sparse_nnz != masked``, an
            exception is raised.
        nondet_tol (float, optional): tolerance for non-determinism. When running
            identical inputs through the differentiation, the results must either match
            exactly (default, 0.0) or be within this tolerance.
@@ -2035,22 +2028,6 @@
        ``True`` if all differences satisfy allclose condition
    """
    if check_sparse_nnz is None:
        if masked is None:
            check_sparse_nnz = masked = False
        else:
            check_sparse_nnz = masked
    else:
        warnings.warn(
            "Backwards compatibility: check_sparse_nnz is deprecated, it will be removed in a future version of PyTorch."
            f" Use masked={check_sparse_nnz} instead."
        )
        if masked is None:
            masked = check_sparse_nnz
        elif check_sparse_nnz != masked:
            raise ValueError(
                f"Expected specified check_sparse_nnz (={check_sparse_nnz}) to be equal to masked (={masked})."
            )
    assert (
        check_forward_ad or check_backward_ad
    ), "Expected at least one of check_forward_ad or check_backward_ad to be True"
@@ -2062,7 +2039,6 @@
), "Setting check_batched_forward_grad=True requires check_forward_ad to be True"
args = locals().copy()
args.pop("raise_exception")
args.pop("check_sparse_nnz")
if not raise_exception:
try:
return _gradcheck_helper(**args)
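After this commit the keyword is gone from the signature entirely, so callers that still pass it fail at call time with an ordinary `TypeError` instead of the old deprecation warning. A quick check (illustrative; the printed message is standard Python behavior, not PyTorch-specific):

    import torch

    x = torch.eye(3, dtype=torch.float64).to_sparse().requires_grad_()

    try:
        torch.autograd.gradcheck(lambda t: t.to_dense(masked_grad=True), (x,),
                                 masked=True, check_sparse_nnz=True)
    except TypeError as e:
        print(e)  # e.g. "gradcheck() got an unexpected keyword argument 'check_sparse_nnz'"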
