Commit

Correct checkpoint's URL for OPT Model. (#648)
MrGavinYu authored Sep 5, 2023
1 parent 859a90f commit 2e0ee8e
Showing 3 changed files with 13 additions and 16 deletions.
9 changes: 4 additions & 5 deletions mindnlp/models/opt/config_opt.py
@@ -15,16 +15,15 @@
 """ OPT model configuration"""
 
 from mindnlp.abc import PreTrainedConfig
-from mindnlp.configs import HF_CONFIG_URL_BASE
+from mindnlp.configs import MINDNLP_CONFIG_URL_BASE
 
 
 __all__ = ['OPTConfig']
 
-OPT_SUPPORT_LIST = ["facebook/opt-350m"]
+OPT_SUPPORT_LIST = ["opt-350m"]
 
-CONFIG_ARCHIVE_MAP = { # TODO: replace the links
-    # model: MINDNLP_CONFIG_URL_BASE.format('facebook', model) for model in OPT_SUPPORT_LIST
-    model: HF_CONFIG_URL_BASE.format(model) for model in OPT_SUPPORT_LIST
+CONFIG_ARCHIVE_MAP = {
+    model: MINDNLP_CONFIG_URL_BASE.format('opt', model) for model in OPT_SUPPORT_LIST
 }
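For reference, a minimal sketch of what the rewritten comprehension produces after this commit. MINDNLP_CONFIG_URL_BASE is imported from mindnlp.configs and its value is not shown in this diff, so the placeholder base below is an assumption used only to make the example runnable.

# Sketch of the CONFIG_ARCHIVE_MAP construction after this commit.
# MINDNLP_CONFIG_URL_BASE is not shown in the diff; this two-slot format
# string is a hypothetical placeholder, not the real base URL.
MINDNLP_CONFIG_URL_BASE = "https://example.invalid/mindnlp/{}/{}/config.json"

OPT_SUPPORT_LIST = ["opt-350m"]

CONFIG_ARCHIVE_MAP = {
    model: MINDNLP_CONFIG_URL_BASE.format('opt', model) for model in OPT_SUPPORT_LIST
}

print(CONFIG_ARCHIVE_MAP)
# {'opt-350m': 'https://example.invalid/mindnlp/opt/opt-350m/config.json'}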
6 changes: 2 additions & 4 deletions mindnlp/models/opt/opt.py
@@ -28,7 +28,7 @@
 from mindspore.nn import CrossEntropyLoss, BCEWithLogitsLoss, MSELoss
 from mindspore.common.initializer import initializer, Normal
 from mindnlp.abc import PreTrainedModel
-from mindnlp.configs import HF_MODEL_URL_BASE
+from mindnlp.configs import MINDNLP_MODEL_URL_BASE
 from ..activations import ACT2FN
 from ..utils import Conv1D
 from .config_opt import OPTConfig, OPT_SUPPORT_LIST
@@ -41,9 +41,7 @@
 )
 
 PRETRAINED_MODEL_ARCHIVE_MAP = {
-    # TODO: replace with MINDNLP
-    # model: MINDNLP_MODEL_URL_BASE.format('gpt', model) for model in OPT_SUPPORT_LIST
-    model: HF_MODEL_URL_BASE.format(model) for model in OPT_SUPPORT_LIST
+    model: MINDNLP_MODEL_URL_BASE.format('opt', model) for model in OPT_SUPPORT_LIST
 }
 
 __all__ = ['OPTAttention', 'OPTModel', 'OPTDecoder', 'OPTForCausalLM']
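The same rename matters when a model name is looked up against this map. A hedged illustration of that lookup follows; resolve_checkpoint_url is a hypothetical helper written for this example, not a function in mindnlp, and the base URL is again a placeholder.

# Hypothetical helper (not part of mindnlp): illustrates why the map keys
# must match the short names ("opt-350m") that callers pass to from_pretrained.
MINDNLP_MODEL_URL_BASE = "https://example.invalid/mindnlp/{}/{}.ckpt"  # placeholder, not the real base

OPT_SUPPORT_LIST = ["opt-350m"]
PRETRAINED_MODEL_ARCHIVE_MAP = {
    model: MINDNLP_MODEL_URL_BASE.format('opt', model) for model in OPT_SUPPORT_LIST
}

def resolve_checkpoint_url(name: str) -> str:
    """Return the checkpoint URL for a supported model name, or raise."""
    try:
        return PRETRAINED_MODEL_ARCHIVE_MAP[name]
    except KeyError as exc:
        raise ValueError(f"{name!r} is not in OPT_SUPPORT_LIST: {OPT_SUPPORT_LIST}") from exc

print(resolve_checkpoint_url("opt-350m"))
# https://example.invalid/mindnlp/opt/opt-350m.ckpt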
14 changes: 7 additions & 7 deletions tests/ut/models/opt/test_modeling_opt.py
@@ -82,7 +82,7 @@ def test_inference_no_head(self):
         Test inference
         """
         model = OPTModel.from_pretrained(
-            "facebook/opt-350m", from_pt=True, return_dict=True
+            "opt-350m", from_pt=False, return_dict=True
         )
         input_ids = Tensor(
             [[0, 31414, 232, 328, 740, 1140, 12695, 69, 46078, 1588, 2]],
@@ -116,21 +116,21 @@ def setUp(self):
         Set up.
         """
         super().setUp()
-        self.path_model = "facebook/opt-350m"
+        self.path_model = "opt-350m"
 
     @pytest.mark.download
     def test_load_model(self):
         r"""
         Test load model
         """
-        _ = OPTForCausalLM.from_pretrained(self.path_model, from_pt=True)
+        _ = OPTForCausalLM.from_pretrained(self.path_model, from_pt=False)
 
     @pytest.mark.download
     def test_logits(self):
         r"""
         Test logits
         """
-        model = OPTForCausalLM.from_pretrained(self.path_model, from_pt=True)
+        model = OPTForCausalLM.from_pretrained(self.path_model, from_pt=False)
         model = model.set_train(False)
         tokenizer = OPTTokenizer.from_pretrained(self.path_model)
 
@@ -221,7 +221,7 @@ def test_generation_pre_attn_layer_norm(self):
         r"""
         Test Generation
         """
-        model_id = "facebook/opt-350m"
+        model_id = "opt-350m"
 
         EXPECTED_OUTPUTS = [
             "Today is a beautiful day and I want to",
@@ -251,7 +251,7 @@ def test_batch_generation(self):
         r"""
         Test batch generation
         """
-        model_id = "facebook/opt-350m"
+        model_id = "opt-350m"
 
         tokenizer = OPTTokenizer.from_pretrained(model_id)
         model = OPTForCausalLM.from_pretrained(model_id)
@@ -301,7 +301,7 @@ def test_generation_post_attn_layer_norm(self):
         r"""
         Test generation
         """
-        model_id = "facebook/opt-350m"
+        model_id = "opt-350m"
 
         EXPECTED_OUTPUTS = [
             "Today is a beautiful day and I want to",
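Taken together, the updated tests load the MindNLP-hosted checkpoint by its short name instead of converting the Hugging Face one. A minimal usage sketch mirroring those calls follows; the import paths are assumptions (the diff only shows the class names and the mindnlp/models/opt module layout), so adjust them to the package's actual exports.

# Mirrors the calls in the updated tests: the short name "opt-350m" resolves
# through PRETRAINED_MODEL_ARCHIVE_MAP, and from_pt=False loads the MindNLP
# checkpoint rather than converting a PyTorch one.
from mindnlp.models import OPTForCausalLM   # import path assumed, not shown in the diff
from mindnlp.models import OPTTokenizer     # import path assumed, not shown in the diff

model_id = "opt-350m"
tokenizer = OPTTokenizer.from_pretrained(model_id)
model = OPTForCausalLM.from_pretrained(model_id, from_pt=False)
model = model.set_train(False)  # evaluation mode, as in test_logits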
