From fb9b73863bf01a4f192c974e6f9e67c388c84be8 Mon Sep 17 00:00:00 2001 From: Joanne Chou Date: Sun, 27 Jul 2025 01:31:53 -0700 Subject: [PATCH] handle empty pooled_embedding Summary: Handle the case where pooled_embeddings is an empty list, which cannot be passed to torch.cat; instead, return an empty tensor with shape [batch_size, 0]. Differential Revision: D79050833 --- torchrec/modules/embedding_modules.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/torchrec/modules/embedding_modules.py b/torchrec/modules/embedding_modules.py index 00726e3c2..e21f14805 100644 --- a/torchrec/modules/embedding_modules.py +++ b/torchrec/modules/embedding_modules.py @@ -46,6 +46,10 @@ def process_pooled_embeddings( pooled_embeddings = torch.ops.fbgemm.group_index_select_dim0( pooled_embeddings, list(torch.unbind(inverse_indices)) ) + if not pooled_embeddings: + # Return a tensor with shape [batch_size, 0] if pooled_embeddings is empty list + batch_size = inverse_indices.shape[0] + return torch.zeros((batch_size, 0), device=inverse_indices.device) return torch.cat(pooled_embeddings, dim=1)