Skip to content
This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit a78dd94

Browse files
committed
Remove updates
1 parent 64597a5 commit a78dd94

File tree

1 file changed

+2
-8
lines changed

1 file changed

+2
-8
lines changed

torchtext/models/t5/model.py

Lines changed: 2 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -18,11 +18,8 @@
1818

1919
from .modules import DECODER_OUTPUTS_TYPE, ENCODER_OUTPUTS_TYPE, PAST_KEY_VALUES_TYPE, T5Decoder, T5Encoder
2020

21-
# logging library is not automatically supported by Torchscript
22-
import warnings
23-
2421

25-
@dataclass(frozen=True)
22+
@dataclass
2623
class T5Conf:
2724
encoder_only: bool = False
2825
linear_head: bool = False
@@ -215,7 +212,6 @@ def prepare_inputs_for_generation(
215212
"return_past_key_values": return_past_key_values,
216213
}
217214

218-
@torch.jit.export
219215
def get_encoder(self) -> T5Encoder:
220216
return self.encoder
221217

@@ -292,8 +288,6 @@ def forward(
292288

293289
# decoder_tokens is None means at start of inference, in which case decoder sequence should begin with padding idx.
294290
if decoder_tokens is None:
295-
batch_size = encoder_output.size()[0]
296-
encoder_output_device = encoder_output.device
297291
decoder_tokens = (
298292
torch.ones((batch_size, 1), device=encoder_output_device, dtype=torch.long) * self.padding_idx
299293
)
@@ -323,7 +317,7 @@ def forward(
323317
# Rescale output before projecting on vocab. This happens when the encoder and decoder share the
324318
# same word embeddings, which is always the case in our t5 implementation.
325319
# See https://github.com/huggingface/transformers/blob/d0acc9537829e7d067edbb791473bbceb2ecf056/src/transformers/models/t5/modeling_t5.py#L1661
326-
decoder_output = decoder_output * (self.embedding_dim**-0.5)
320+
decoder_output = decoder_output * (self.embedding_dim ** -0.5)
327321
decoder_output = self.lm_head(decoder_output)
328322
decoder_outputs["decoder_output"] = decoder_output
329323

0 commit comments

Comments (0)