Skip to content

Commit

Permalink
turn off all fusion options in biobert config
Browse files Browse the repository at this point in the history
  • Loading branch information
sichu2023 committed Nov 18, 2024
1 parent 5147cb8 commit ca2800f
Showing 1 changed file with 4 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -477,13 +477,13 @@ class BioBertConfig(
# From megatron.core.models.gpt.bert_model.GPTModel
kv_channels: int | None = None
fp16_lm_cross_entropy: bool = False
apply_rope_fusion: bool = True
apply_rope_fusion: bool = False
parallel_output: bool = True
bias_dropout_fusion: bool = True
bias_activation_fusion: bool = True
bias_dropout_fusion: bool = False
bias_activation_fusion: bool = False
masked_softmax_fusion: bool = True
persist_layer_norm: bool = True
get_attention_mask_from_fusion: bool = True
get_attention_mask_from_fusion: bool = False
share_embeddings_and_output_weights: bool = False # try True
make_vocab_size_divisible_by: int = 128
position_embedding_type: PositionEmbeddingKinds = "learned_absolute"
Expand Down

0 comments on commit ca2800f

Please sign in to comment.