From db23f6bc2b02673b2d27be7865fb0a52b50c5d13 Mon Sep 17 00:00:00 2001
From: Fei Yu
Date: Fri, 19 Sep 2025 09:27:55 -0700
Subject: [PATCH] add back skip CPU container test when using cuda device (#3385)

Summary:
As in the title: a few tests were missed without the skip decorator, causing the CI CPU unit test to fail. Somehow the previous diff did not reveal the ones that actually failed on CI, so these are added back separately.
(https://www.internalfb.com/diff/D67302872?dst_version_fbid=815631520895451&transaction_fbid=1481902306176330)

Reviewed By: spmex

Differential Revision: D82771497
---
 .../modules/tests/test_itep_embedding_modules.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/torchrec/modules/tests/test_itep_embedding_modules.py b/torchrec/modules/tests/test_itep_embedding_modules.py
index fc089f631..1f1a4ee26 100644
--- a/torchrec/modules/tests/test_itep_embedding_modules.py
+++ b/torchrec/modules/tests/test_itep_embedding_modules.py
@@ -190,6 +190,11 @@ def generate_expected_address_lookup_buffer(
 
         return torch.tensor(address_lookup, dtype=torch.int64)
 
+    # pyre-ignore[56]: Pyre was not able to infer the type of argument
+    @unittest.skipIf(
+        torch.cuda.device_count() <= 1,
+        "Not enough GPUs, this test requires at least two GPUs",
+    )
     def test_init_itep_module(self) -> None:
         itep_module = GenericITEPModule(
             table_name_to_unpruned_hash_sizes=self._table_name_to_unpruned_hash_sizes,
@@ -222,6 +227,11 @@ def test_init_itep_module(self) -> None:
             equal_nan=True,
         )
 
+    # pyre-ignore[56]: Pyre was not able to infer the type of argument
+    @unittest.skipIf(
+        torch.cuda.device_count() <= 1,
+        "Not enough GPUs, this test requires at least two GPUs",
+    )
     def test_init_itep_module_without_pruned_table(self) -> None:
         itep_module = GenericITEPModule(
             table_name_to_unpruned_hash_sizes={},
@@ -353,6 +363,11 @@ def test_eval_forward(
         # Check that reset_weight_momentum is not called
         self.assertEqual(mock_reset_weight_momentum.call_count, 0)
 
+    # pyre-ignore[56]: Pyre was not able to infer the type of argument
+    @unittest.skipIf(
+        torch.cuda.device_count() <= 1,
+        "Not enough GPUs, this test requires at least two GPUs",
+    )
     def test_iter_increment_per_forward(self) -> None:
         """Test that the iteration counter increments correctly with each forward pass."""
         itep_module = GenericITEPModule(
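
Note (illustrative, not part of the patch): a minimal standalone sketch of the guard pattern the diff adds, assuming only stock unittest and torch. The test class and test body below are hypothetical placeholders; the point is that on a CPU-only CI host torch.cuda.device_count() returns 0, so unittest.skipIf reports the test as skipped rather than letting it fail.

import unittest

import torch


class ExampleMultiGPUTest(unittest.TestCase):
    # Skip unless at least two CUDA devices are visible, mirroring the
    # decorator added in the patch above.
    @unittest.skipIf(
        torch.cuda.device_count() <= 1,
        "Not enough GPUs, this test requires at least two GPUs",
    )
    def test_needs_two_gpus(self) -> None:
        # Hypothetical body: runs only when two or more GPUs are present.
        self.assertGreaterEqual(torch.cuda.device_count(), 2)


if __name__ == "__main__":
    unittest.main()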