Skip to content

Commit

Permalink
fix: ColBERT linear size for smaller model
Browse files Browse the repository at this point in the history
  • Loading branch information
bclavie committed Aug 19, 2024
1 parent 7f881ec commit ecc2fe9
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ packages = [
name = "rerankers"


version = "0.5.2"
version = "0.5.2post1"

description = "A unified API for various document re-ranking models."

Expand Down
2 changes: 1 addition & 1 deletion rerankers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@
from rerankers.documents import Document

__all__ = ["Reranker", "Document"]
__version__ = "0.5.2"
__version__ = "0.5.2post1"
9 changes: 8 additions & 1 deletion rerankers/models/colbert_ranker.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,14 @@ class ColBERTModel(BertPreTrainedModel):
def __init__(self, config):
    """Build a ColBERT ranking model on top of a BERT encoder.

    Args:
        config: Hugging Face model config; ``config._name_or_path`` is
            inspected to pick the output projection size.

    The final linear layer downcasts BERT's hidden states to the ColBERT
    embedding dimension: 96 for "small" checkpoints, 128 otherwise.
    """
    super().__init__(config)
    self.bert = BertModel(config)

    # TODO: Load the projection dim from artifact.metadata instead of
    # inferring it from the checkpoint name.
    # NOTE(review): substring match on the model path is fragile — any
    # model whose name contains "small" gets 96 dims; confirm intended.
    if "small" in config._name_or_path:
        linear_dim = 96
    else:
        linear_dim = 128
    # Bug fix: original used a plain string, printing the literal
    # "{linear_dim}" — must be an f-string to interpolate the value.
    print(f"Linear Dim set to: {linear_dim} for downcasting")
    self.linear = nn.Linear(config.hidden_size, linear_dim, bias=False)
    self.init_weights()

def forward(
Expand Down

0 comments on commit ecc2fe9

Please sign in to comment.