From df7711dd589fd2ab5e831f41af1e16b1dc628da6 Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Thu, 25 Dec 2025 04:19:03 +0530 Subject: [PATCH 1/9] ENH: support additional dtypes in pad_nd Prefer the PyTorch padding backend when supported and safely fall back to NumPy on error. Add unit tests to validate backend selection and ensure output dtype is preserved. Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 8 +-- .../transforms/croppad/test_pad_nd_dtypes.py | 58 +++++++++++++++++++ 2 files changed, 60 insertions(+), 6 deletions(-) create mode 100644 tests/transforms/croppad/test_pad_nd_dtypes.py diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index 653db43bc5..8b15a585c6 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -96,12 +96,8 @@ def pad_nd( return _np_pad(img, pad_width=to_pad, mode=mode, **kwargs) try: _pad = _np_pad - if mode in {"constant", "reflect", "edge", "replicate", "wrap", "circular"} and img.dtype not in { - torch.int16, - torch.int64, - torch.bool, - torch.uint8, - }: + if mode in {"constant", "reflect", "edge", "replicate", "wrap", "circular"}: + # Try PyTorch pad for these modes; fallback to NumPy on error. _pad = _pt_pad return _pad(img, pad_width=to_pad, mode=mode, **kwargs) except (ValueError, TypeError, RuntimeError) as err: diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py new file mode 100644 index 0000000000..b619745cc3 --- /dev/null +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -0,0 +1,58 @@ +# Copyright (c) MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import annotations + +from unittest.mock import Mock, patch + +import pytest +import torch + +import monai.transforms.croppad.functional as F +from monai.transforms.croppad.functional import pad_nd + + +def test_pad_uses_pt_for_bool(): + img = torch.ones((1, 4, 4), dtype=torch.bool) + to_pad = [(0, 0), (1, 1), (2, 2)] + with patch.object(F, "_pt_pad", wraps=F._pt_pad) as mock_pt, patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np: + out = pad_nd(img, to_pad, mode="constant", value=0) + + assert mock_pt.called + assert not mock_np.called + assert out.dtype == img.dtype + + +def test_pad_falls_back_to_np_if_pt_raises(): + img = torch.ones((1, 4, 4), dtype=torch.bool) + to_pad = [(0, 0), (1, 1), (2, 2)] + with ( + patch.object(F, "_pt_pad", new=Mock(side_effect=NotImplementedError("no"))) as mock_pt, + patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np, + ): + out = pad_nd(img, to_pad, mode="constant", value=0) + + assert mock_pt.called + assert mock_np.called + assert out.dtype == img.dtype + + +@pytest.mark.parametrize( + "dtype", [torch.bool, torch.int8, torch.int16, torch.int32, torch.int64, torch.uint8, torch.float32] +) +def test_pad_dtype_no_error_and_dtype_preserved(dtype): + img = torch.ones((1, 4, 4), dtype=dtype) + to_pad = [(0, 0), (1, 1), (2, 2)] + out = pad_nd(img, to_pad, mode="constant", value=0) + + assert out.shape == (1, 6, 8) + assert out.dtype == img.dtype From ad9f60afcd4562ad87753231c7d763598eb79ef0 Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Thu, 25 Dec 2025 23:05:15 +0530 Subject: [PATCH 2/9] FIX: 
ensure pad_nd handles value kwarg correctly across modes Strip value for non-constant modes for both PyTorch and NumPy backends and ensure reliable fallback behavior. Update tests to cover multiple padding modes and dtype preservation. Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 11 +++++--- .../transforms/croppad/test_pad_nd_dtypes.py | 27 +++++++++++++------ 2 files changed, 27 insertions(+), 11 deletions(-) diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index 8b15a585c6..78ed3ba19f 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -99,12 +99,17 @@ def pad_nd( if mode in {"constant", "reflect", "edge", "replicate", "wrap", "circular"}: # Try PyTorch pad for these modes; fallback to NumPy on error. _pad = _pt_pad - return _pad(img, pad_width=to_pad, mode=mode, **kwargs) + call_kwargs = dict(kwargs) + if mode != "constant": + call_kwargs.pop("value", None) + return _pad(img, pad_width=to_pad, mode=mode, **call_kwargs) + except NotImplementedError: + return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) except (ValueError, TypeError, RuntimeError) as err: - if isinstance(err, NotImplementedError) or any( + if any( k in str(err) for k in ("supported", "unexpected keyword", "implemented", "value") ): - return _np_pad(img, pad_width=to_pad, mode=mode, **kwargs) + return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) raise ValueError( f"{img.shape} {to_pad} {mode} {kwargs} {img.dtype} {img.device if isinstance(img, torch.Tensor) else None}" ) from err diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index b619745cc3..a317b5783a 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -8,20 +8,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. - - +""" +Tests for pad_nd dtype support and backend selection. +Validates PyTorch padding preference and NumPy fallback behavior. +""" from __future__ import annotations - from unittest.mock import Mock, patch - import pytest import torch - import monai.transforms.croppad.functional as F from monai.transforms.croppad.functional import pad_nd - def test_pad_uses_pt_for_bool(): + """Test that pad_nd uses PyTorch backend for bool dtype in constant mode.""" img = torch.ones((1, 4, 4), dtype=torch.bool) to_pad = [(0, 0), (1, 1), (2, 2)] with patch.object(F, "_pt_pad", wraps=F._pt_pad) as mock_pt, patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np: @@ -31,8 +30,8 @@ def test_pad_uses_pt_for_bool(): assert not mock_np.called assert out.dtype == img.dtype - def test_pad_falls_back_to_np_if_pt_raises(): + """Test that pad_nd falls back to NumPy when PyTorch raises NotImplementedError.""" img = torch.ones((1, 4, 4), dtype=torch.bool) to_pad = [(0, 0), (1, 1), (2, 2)] with ( @@ -45,14 +44,26 @@ def test_pad_falls_back_to_np_if_pt_raises(): assert mock_np.called assert out.dtype == img.dtype - @pytest.mark.parametrize( "dtype", [torch.bool, torch.int8, torch.int16, torch.int32, torch.int64, torch.uint8, torch.float32] ) def test_pad_dtype_no_error_and_dtype_preserved(dtype): + """Test that pad_nd handles various dtypes without error and preserves dtype.""" img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)] out = pad_nd(img, to_pad, mode="constant", value=0) assert out.shape == (1, 6, 8) assert out.dtype == img.dtype + +@pytest.mark.parametrize("mode", ["constant", "reflect", "replicate"]) +@pytest.mark.parametrize("dtype", [torch.bool, torch.int8, torch.float32]) +def test_pad_multiple_modes_dtype_preserved(mode, dtype): + """Test that pad_nd preserves dtype across multiple padding modes.""" + img = torch.ones((1, 4, 4), dtype=dtype) + to_pad = 
[(0, 0), (1, 1), (2, 2)] + + out = pad_nd(img, to_pad, mode=mode, value=0) + + assert out.shape == (1, 6, 8) + assert out.dtype == img.dtype From 399cf0dbb7bccf6996268256bf3c0a0f9dd6be1f Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Sat, 27 Dec 2025 19:49:12 +0530 Subject: [PATCH 3/9] FIX: clean pad_nd fallback handling and mode-specific kwargs in test Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 8 ++++++-- tests/transforms/croppad/test_pad_nd_dtypes.py | 5 +++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index 78ed3ba19f..98cf2a1dfc 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -104,12 +104,16 @@ def pad_nd( call_kwargs.pop("value", None) return _pad(img, pad_width=to_pad, mode=mode, **call_kwargs) except NotImplementedError: - return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) + # PyTorch does not support this combination, fall back to NumPy + return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) except (ValueError, TypeError, RuntimeError) as err: + # PyTorch may raise generic errors for unsupported modes/dtypes or kwargs. + # Since there are no stable exception types for these cases, we fall back + # to NumPy by matching known error message patterns. 
if any( k in str(err) for k in ("supported", "unexpected keyword", "implemented", "value") ): - return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) + return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) raise ValueError( f"{img.shape} {to_pad} {mode} {kwargs} {img.dtype} {img.device if isinstance(img, torch.Tensor) else None}" ) from err diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index a317b5783a..260160ba5f 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -62,8 +62,9 @@ def test_pad_multiple_modes_dtype_preserved(mode, dtype): """Test that pad_nd preserves dtype across multiple padding modes.""" img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)] - - out = pad_nd(img, to_pad, mode=mode, value=0) + + kwargs = {"value": 0} if mode == "constant" else {} + out = pad_nd(img, to_pad, mode=mode, **kwargs) assert out.shape == (1, 6, 8) assert out.dtype == img.dtype From 7aa63e7099da080d4c7b5d943f9b3cd3c66cd565 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 27 Dec 2025 14:20:04 +0000 Subject: [PATCH 4/9] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/transforms/croppad/test_pad_nd_dtypes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index 260160ba5f..5b87f0eaa7 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -62,7 +62,7 @@ def test_pad_multiple_modes_dtype_preserved(mode, dtype): """Test that pad_nd preserves dtype across multiple padding modes.""" img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)] - + kwargs = {"value": 0} if mode == "constant" else {} out 
= pad_nd(img, to_pad, mode=mode, **kwargs) From dd26dea381cc1e4f2faff8c75b0e97365d529142 Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Wed, 21 Jan 2026 23:04:02 +0530 Subject: [PATCH 5/9] FIX: align pad_nd behavior and tests with MONAI conventions - Raise an explicit error when 'value' is provided with non-constant modes. - Rewrite tests using unittest + parameterized to match MONAI style. Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 4 +- .../transforms/croppad/test_pad_nd_dtypes.py | 129 +++++++++++------- 2 files changed, 84 insertions(+), 49 deletions(-) diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index 98cf2a1dfc..627a83abc1 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -100,8 +100,8 @@ def pad_nd( # Try PyTorch pad for these modes; fallback to NumPy on error. _pad = _pt_pad call_kwargs = dict(kwargs) - if mode != "constant": - call_kwargs.pop("value", None) + if mode != "constant" and "value" in call_kwargs: + raise ValueError("'value' argument is only valid when mode='constant'") return _pad(img, pad_width=to_pad, mode=mode, **call_kwargs) except NotImplementedError: # PyTorch does not support this combination, fall back to NumPy diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index 260160ba5f..9ae087e997 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -13,58 +13,93 @@ Validates PyTorch padding preference and NumPy fallback behavior. 
""" from __future__ import annotations + +import unittest from unittest.mock import Mock, patch -import pytest + +from parameterized.parameterized import parameterized + import torch + import monai.transforms.croppad.functional as F from monai.transforms.croppad.functional import pad_nd -def test_pad_uses_pt_for_bool(): - """Test that pad_nd uses PyTorch backend for bool dtype in constant mode.""" - img = torch.ones((1, 4, 4), dtype=torch.bool) - to_pad = [(0, 0), (1, 1), (2, 2)] - with patch.object(F, "_pt_pad", wraps=F._pt_pad) as mock_pt, patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np: - out = pad_nd(img, to_pad, mode="constant", value=0) - assert mock_pt.called - assert not mock_np.called - assert out.dtype == img.dtype - -def test_pad_falls_back_to_np_if_pt_raises(): - """Test that pad_nd falls back to NumPy when PyTorch raises NotImplementedError.""" - img = torch.ones((1, 4, 4), dtype=torch.bool) - to_pad = [(0, 0), (1, 1), (2, 2)] - with ( - patch.object(F, "_pt_pad", new=Mock(side_effect=NotImplementedError("no"))) as mock_pt, - patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np, - ): +class TestPadNdDtypes(unittest.TestCase): + def test_pad_uses_pt_for_bool(self): + """Test that pad_nd uses PyTorch backend for bool dtype in constant mode.""" + img = torch.ones((1, 4, 4), dtype=torch.bool) + to_pad = [(0, 0), (1, 1), (2, 2)] + with patch.object(F, "_pt_pad", wraps=F._pt_pad) as mock_pt, patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np: + out = pad_nd(img, to_pad, mode="constant", value=0) + + self.assertTrue(mock_pt.called) + self.assertFalse(mock_np.called) + self.assertEqual(out.dtype, img.dtype) + self.assertEqual(out.shape, (1, 6, 8)) + + def test_pad_falls_back_to_np_if_pt_raises(self): + """Test that pad_nd falls back to NumPy when PyTorch raises NotImplementedError.""" + img = torch.ones((1, 4, 4), dtype=torch.bool) + to_pad = [(0, 0), (1, 1), (2, 2)] + with ( + patch.object(F, "_pt_pad", 
new=Mock(side_effect=NotImplementedError("no"))) as mock_pt, + patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np, + ): + out = pad_nd(img, to_pad, mode="constant", value=0) + + self.assertTrue(mock_pt.called) + self.assertTrue(mock_np.called) + self.assertEqual(out.dtype, img.dtype) + self.assertEqual(out.shape, (1, 6, 8)) + + @parameterized.expand([ + torch.bool, + torch.int8, + torch.int16, + torch.int32, + torch.int64, + torch.uint8, + torch.float32, + ]) + def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): + """Test that pad_nd handles various dtypes without error and preserves dtype.""" + img = torch.ones((1, 4, 4), dtype=dtype) + to_pad = [(0, 0), (1, 1), (2, 2)] out = pad_nd(img, to_pad, mode="constant", value=0) - assert mock_pt.called - assert mock_np.called - assert out.dtype == img.dtype - -@pytest.mark.parametrize( - "dtype", [torch.bool, torch.int8, torch.int16, torch.int32, torch.int64, torch.uint8, torch.float32] -) -def test_pad_dtype_no_error_and_dtype_preserved(dtype): - """Test that pad_nd handles various dtypes without error and preserves dtype.""" - img = torch.ones((1, 4, 4), dtype=dtype) - to_pad = [(0, 0), (1, 1), (2, 2)] - out = pad_nd(img, to_pad, mode="constant", value=0) - - assert out.shape == (1, 6, 8) - assert out.dtype == img.dtype - -@pytest.mark.parametrize("mode", ["constant", "reflect", "replicate"]) -@pytest.mark.parametrize("dtype", [torch.bool, torch.int8, torch.float32]) -def test_pad_multiple_modes_dtype_preserved(mode, dtype): - """Test that pad_nd preserves dtype across multiple padding modes.""" - img = torch.ones((1, 4, 4), dtype=dtype) - to_pad = [(0, 0), (1, 1), (2, 2)] - - kwargs = {"value": 0} if mode == "constant" else {} - out = pad_nd(img, to_pad, mode=mode, **kwargs) - - assert out.shape == (1, 6, 8) - assert out.dtype == img.dtype + self.assertEqual(out.shape, (1, 6, 8)) + self.assertEqual(out.dtype, img.dtype) + + @parameterized.expand([ + ("constant", torch.bool), + ("constant", torch.int8), + 
("constant", torch.float32), + ("reflect", torch.bool), + ("reflect", torch.int8), + ("reflect", torch.float32), + ("replicate", torch.bool), + ("replicate", torch.int8), + ("replicate", torch.float32), + ]) + def test_pad_multiple_modes_dtype_preserved(self, mode, dtype): + """Test that pad_nd preserves dtype across multiple padding modes.""" + img = torch.ones((1, 4, 4), dtype=dtype) + to_pad = [(0, 0), (1, 1), (2, 2)] + + kwargs = {"value": 0} if mode == "constant" else {} + out = pad_nd(img, to_pad, mode=mode, **kwargs) + + self.assertEqual(out.shape, (1, 6, 8)) + self.assertEqual(out.dtype, img.dtype) + + def test_value_with_non_constant_mode_raises(self): + """Test that pad_nd raises ValueError when 'value' is provided with non-constant mode.""" + img = torch.ones((1, 4, 4)) + to_pad = [(0, 0), (1, 1), (2, 2)] + with self.assertRaises(ValueError): + pad_nd(img, to_pad, mode="reflect", **{"value": 0}) + + +if __name__ == "__main__": + unittest.main() From 6440db62b6171b13f110f52234aad63ecef93c68 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 17:46:28 +0000 Subject: [PATCH 6/9] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/transforms/croppad/test_pad_nd_dtypes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index c9feabaa17..233894ab72 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -86,7 +86,7 @@ def test_pad_multiple_modes_dtype_preserved(self, mode, dtype): """Test that pad_nd preserves dtype across multiple padding modes.""" img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)] - + kwargs = {"value": 0} if mode == "constant" else {} out = pad_nd(img, to_pad, mode=mode, **kwargs) @@ -102,4 +102,4 @@ def 
test_value_with_non_constant_mode_raises(self): if __name__ == "__main__": - unittest.main() \ No newline at end of file + unittest.main() From 9385587feeace0da41ce99f8cc954ac2b19d87e5 Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Thu, 22 Jan 2026 00:42:44 +0530 Subject: [PATCH 7/9] Resolve incorrectly sorted and/or formatted imports. Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 4 +- .../transforms/croppad/test_pad_nd_dtypes.py | 46 +++++++++---------- 2 files changed, 22 insertions(+), 28 deletions(-) diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index 627a83abc1..b01e5b5aa4 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -110,9 +110,7 @@ def pad_nd( # PyTorch may raise generic errors for unsupported modes/dtypes or kwargs. # Since there are no stable exception types for these cases, we fall back # to NumPy by matching known error message patterns. 
- if any( - k in str(err) for k in ("supported", "unexpected keyword", "implemented", "value") - ): + if any(k in str(err) for k in ("supported", "unexpected keyword", "implemented", "value")): return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) raise ValueError( f"{img.shape} {to_pad} {mode} {kwargs} {img.dtype} {img.device if isinstance(img, torch.Tensor) else None}" diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index c9feabaa17..bc00a7bedb 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -17,9 +17,8 @@ import unittest from unittest.mock import Mock, patch -from parameterized.parameterized import parameterized - import torch +from parameterized.parameterized import parameterized import monai.transforms.croppad.functional as F from monai.transforms.croppad.functional import pad_nd @@ -30,7 +29,10 @@ def test_pad_uses_pt_for_bool(self): """Test that pad_nd uses PyTorch backend for bool dtype in constant mode.""" img = torch.ones((1, 4, 4), dtype=torch.bool) to_pad = [(0, 0), (1, 1), (2, 2)] - with patch.object(F, "_pt_pad", wraps=F._pt_pad) as mock_pt, patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np: + with ( + patch.object(F, "_pt_pad", wraps=F._pt_pad) as mock_pt, + patch.object(F, "_np_pad", wraps=F._np_pad) as mock_np, + ): out = pad_nd(img, to_pad, mode="constant", value=0) self.assertTrue(mock_pt.called) @@ -53,15 +55,7 @@ def test_pad_falls_back_to_np_if_pt_raises(self): self.assertEqual(out.dtype, img.dtype) self.assertEqual(out.shape, (1, 6, 8)) - @parameterized.expand([ - torch.bool, - torch.int8, - torch.int16, - torch.int32, - torch.int64, - torch.uint8, - torch.float32, - ]) + @parameterized.expand([torch.bool, torch.int8, torch.int16, torch.int32, torch.int64, torch.uint8, torch.float32]) def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): """Test that pad_nd handles various dtypes without error 
and preserves dtype.""" img = torch.ones((1, 4, 4), dtype=dtype) @@ -71,22 +65,24 @@ def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): self.assertEqual(out.shape, (1, 6, 8)) self.assertEqual(out.dtype, img.dtype) - @parameterized.expand([ - ("constant", torch.bool), - ("constant", torch.int8), - ("constant", torch.float32), - ("reflect", torch.bool), - ("reflect", torch.int8), - ("reflect", torch.float32), - ("replicate", torch.bool), - ("replicate", torch.int8), - ("replicate", torch.float32), - ]) + @parameterized.expand( + [ + ("constant", torch.bool), + ("constant", torch.int8), + ("constant", torch.float32), + ("reflect", torch.bool), + ("reflect", torch.int8), + ("reflect", torch.float32), + ("replicate", torch.bool), + ("replicate", torch.int8), + ("replicate", torch.float32), + ] + ) def test_pad_multiple_modes_dtype_preserved(self, mode, dtype): """Test that pad_nd preserves dtype across multiple padding modes.""" img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)] - + kwargs = {"value": 0} if mode == "constant" else {} out = pad_nd(img, to_pad, mode=mode, **kwargs) @@ -102,4 +98,4 @@ def test_value_with_non_constant_mode_raises(self): if __name__ == "__main__": - unittest.main() \ No newline at end of file + unittest.main() From e45b2c217f1159bde98ef77c8e2e14c93fabf22b Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Sat, 24 Jan 2026 01:44:35 +0530 Subject: [PATCH 8/9] address minor changes review comments in pad_nd and tests Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 9 +++--- .../transforms/croppad/test_pad_nd_dtypes.py | 31 ++++++++++--------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index b01e5b5aa4..dce282cb23 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -99,19 +99,18 @@ def pad_nd( if mode in {"constant", 
"reflect", "edge", "replicate", "wrap", "circular"}: # Try PyTorch pad for these modes; fallback to NumPy on error. _pad = _pt_pad - call_kwargs = dict(kwargs) - if mode != "constant" and "value" in call_kwargs: + if mode != "constant" and "value" in kwargs: raise ValueError("'value' argument is only valid when mode='constant'") - return _pad(img, pad_width=to_pad, mode=mode, **call_kwargs) + return _pad(img, pad_width=to_pad, mode=mode, **kwargs) except NotImplementedError: # PyTorch does not support this combination, fall back to NumPy - return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) + return _np_pad(img, pad_width=to_pad, mode=mode, **kwargs) except (ValueError, TypeError, RuntimeError) as err: # PyTorch may raise generic errors for unsupported modes/dtypes or kwargs. # Since there are no stable exception types for these cases, we fall back # to NumPy by matching known error message patterns. if any(k in str(err) for k in ("supported", "unexpected keyword", "implemented", "value")): - return _np_pad(img, pad_width=to_pad, mode=mode, **call_kwargs) + return _np_pad(img, pad_width=to_pad, mode=mode, **kwargs) raise ValueError( f"{img.shape} {to_pad} {mode} {kwargs} {img.dtype} {img.device if isinstance(img, torch.Tensor) else None}" ) from err diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index bc00a7bedb..ce8a4def0b 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -23,6 +23,19 @@ import monai.transforms.croppad.functional as F from monai.transforms.croppad.functional import pad_nd +DTYPES = [torch.bool, torch.int8, torch.int16, torch.int32, torch.int64, torch.uint8, torch.float32] +MODES_DTYPES = [ + ("constant", torch.bool), + ("constant", torch.int8), + ("constant", torch.float32), + ("reflect", torch.bool), + ("reflect", torch.int8), + ("reflect", torch.float32), + ("replicate", torch.bool), + ("replicate", 
torch.int8), + ("replicate", torch.float32), +] + class TestPadNdDtypes(unittest.TestCase): def test_pad_uses_pt_for_bool(self): @@ -55,7 +68,7 @@ def test_pad_falls_back_to_np_if_pt_raises(self): self.assertEqual(out.dtype, img.dtype) self.assertEqual(out.shape, (1, 6, 8)) - @parameterized.expand([torch.bool, torch.int8, torch.int16, torch.int32, torch.int64, torch.uint8, torch.float32]) + @parameterized.expand(DTYPES) def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): """Test that pad_nd handles various dtypes without error and preserves dtype.""" img = torch.ones((1, 4, 4), dtype=dtype) @@ -65,19 +78,7 @@ def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): self.assertEqual(out.shape, (1, 6, 8)) self.assertEqual(out.dtype, img.dtype) - @parameterized.expand( - [ - ("constant", torch.bool), - ("constant", torch.int8), - ("constant", torch.float32), - ("reflect", torch.bool), - ("reflect", torch.int8), - ("reflect", torch.float32), - ("replicate", torch.bool), - ("replicate", torch.int8), - ("replicate", torch.float32), - ] - ) + @parameterized.expand(MODES_DTYPES) def test_pad_multiple_modes_dtype_preserved(self, mode, dtype): """Test that pad_nd preserves dtype across multiple padding modes.""" img = torch.ones((1, 4, 4), dtype=dtype) @@ -94,7 +95,7 @@ def test_value_with_non_constant_mode_raises(self): img = torch.ones((1, 4, 4)) to_pad = [(0, 0), (1, 1), (2, 2)] with self.assertRaises(ValueError): - pad_nd(img, to_pad, mode="reflect", **{"value": 0}) + pad_nd(img, to_pad, mode="reflect", value=0) if __name__ == "__main__": From 70e5fae7542078822664facf97551faf39b1c91a Mon Sep 17 00:00:00 2001 From: Shubham Chandravanshi Date: Sat, 24 Jan 2026 03:59:30 +0530 Subject: [PATCH 9/9] FIX: apply CodeRabbit suggestions for pad_nd value handling Signed-off-by: Shubham Chandravanshi --- monai/transforms/croppad/functional.py | 6 ++++-- tests/transforms/croppad/test_pad_nd_dtypes.py | 11 +++++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) 
diff --git a/monai/transforms/croppad/functional.py b/monai/transforms/croppad/functional.py index dce282cb23..acf42849d3 100644 --- a/monai/transforms/croppad/functional.py +++ b/monai/transforms/croppad/functional.py @@ -91,7 +91,11 @@ def pad_nd( https://pytorch.org/docs/stable/generated/torch.nn.functional.pad.html kwargs: other arguments for the `np.pad` or `torch.pad` function. note that `np.pad` treats channel dimension as the first dimension. + Raises: + ValueError: If `value` is provided when `mode` is not ``"constant"``. """ + if mode != "constant" and "value" in kwargs: + raise ValueError("'value' argument is only valid when mode='constant'") if mode in {"linear_ramp", "maximum", "mean", "median", "minimum", "symmetric", "empty"}: return _np_pad(img, pad_width=to_pad, mode=mode, **kwargs) try: @@ -99,8 +103,6 @@ def pad_nd( if mode in {"constant", "reflect", "edge", "replicate", "wrap", "circular"}: # Try PyTorch pad for these modes; fallback to NumPy on error. _pad = _pt_pad - if mode != "constant" and "value" in kwargs: - raise ValueError("'value' argument is only valid when mode='constant'") return _pad(img, pad_width=to_pad, mode=mode, **kwargs) except NotImplementedError: # PyTorch does not support this combination, fall back to NumPy diff --git a/tests/transforms/croppad/test_pad_nd_dtypes.py b/tests/transforms/croppad/test_pad_nd_dtypes.py index ce8a4def0b..7fa633b8aa 100644 --- a/tests/transforms/croppad/test_pad_nd_dtypes.py +++ b/tests/transforms/croppad/test_pad_nd_dtypes.py @@ -70,7 +70,10 @@ def test_pad_falls_back_to_np_if_pt_raises(self): @parameterized.expand(DTYPES) def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): - """Test that pad_nd handles various dtypes without error and preserves dtype.""" + """Test that pad_nd handles various dtypes without error and preserves dtype. + Args: + dtype: Input dtype under test. 
+ """ img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)] out = pad_nd(img, to_pad, mode="constant", value=0) @@ -80,7 +83,11 @@ def test_pad_dtype_no_error_and_dtype_preserved(self, dtype): @parameterized.expand(MODES_DTYPES) def test_pad_multiple_modes_dtype_preserved(self, mode, dtype): - """Test that pad_nd preserves dtype across multiple padding modes.""" + """Test that pad_nd preserves dtype across multiple padding modes. + Args: + mode: Padding mode under test. + dtype: Input dtype under test. + """ img = torch.ones((1, 4, 4), dtype=dtype) to_pad = [(0, 0), (1, 1), (2, 2)]