Enable hpu rms fused kernel for t5 (#344)
1 parent 0c4ab75 · commit 8a0a81d
Showing 4 changed files with 35 additions and 0 deletions.
The first new file is a single added line re-exporting the new forward:

```python
from .modeling_t5 import gaudi_t5_layernorm_forward
```
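If this one-line file is the T5 subpackage's `__init__.py`, the re-export makes the function importable at package level. A hedged sketch of such an import; the `optimum.habana.transformers.models.t5` path below is an assumption based on the repository's layout, not something shown in the diff:

```python
# Hypothetical import path -- assumes the one-line file above is
# optimum/habana/transformers/models/t5/__init__.py.
from optimum.habana.transformers.models.t5 import gaudi_t5_layernorm_forward
```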
The second new file (29 added lines) defines the forward, using Habana's fused RMSNorm kernel when possible and falling back to the stock eager computation otherwise:

```python
import torch

# Use Habana's fused RMSNorm kernel when habana_frameworks is available;
# otherwise fall back to the eager computation below.
try:
    from habana_frameworks.torch.hpex.normalization import FusedRMSNorm
except ImportError:
    print("Not using HPU fused kernel for RMSNorm")
    FusedRMSNorm = None


def gaudi_t5_layernorm_forward(self, hidden_states):
    """
    Copied from T5LayerNorm.forward: https://github.com/huggingface/transformers/blob/main/src/transformers/models/t5/modeling_t5.py
    The only difference is:
    - override RMSNorm with Habana fused RMSNorm
    """
    if not self.training and hidden_states.device.type == "hpu" and FusedRMSNorm:
        # Fused path: run the kernel in float32, then cast back to the
        # original dtype.
        orig_dtype = hidden_states.dtype
        hidden_states = FusedRMSNorm.apply(hidden_states.float(), self.weight.float(), self.variance_epsilon)
        return hidden_states.to(orig_dtype)
    else:
        # Eager path: x * rsqrt(mean(x^2) + eps), computed in float32.
        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)

        # convert into half-precision if necessary
        if self.weight.dtype in [torch.float16, torch.bfloat16]:
            hidden_states = hidden_states.to(self.weight.dtype)

        return self.weight * hidden_states
```
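Only two of the four changed files are shown here, so the code that actually attaches this forward to `T5LayerNorm` is not visible. A minimal sketch of how such a replacement is typically wired in, assuming plain monkey-patching (the patch call below is illustrative, not taken from this commit):

```python
from transformers.models.t5.modeling_t5 import T5LayerNorm

# Illustrative patch, assumed rather than shown in this diff: bind the
# Gaudi-aware forward to T5LayerNorm so every instance picks it up.
T5LayerNorm.forward = gaudi_t5_layernorm_forward
```

With the patch applied, inference on HPU in eval mode takes the fused-kernel branch, while training and non-HPU devices fall through to the eager RMSNorm computation.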