34
34
_BIOBERT_PATH = "https://github.com/naver/biobert-pretrained/releases/download/"
35
35
_SCIBERT_PATH = "https://s3-us-west-2.amazonaws.com/ai2-s2-research/" \
36
36
"scibert/tensorflow_models/"
37
- _BERT_MSMARCO_PATH = "https://drive.google.com/file/d/"
37
+ _BERT_MSMARCO_NOGUEIRA19_PATH = "https://drive.google.com/file/d/"
38
38
39
39
40
40
class PretrainedBERTMixin (PretrainedMixin , ABC ):
@@ -103,9 +103,9 @@ class PretrainedBERTMixin(PretrainedMixin, ABC):
103
103
(`Nguyen et al`., 2016) dataset. It's the best performing model (on Jan
104
104
8th 2019) on MS-MARCO Passage re-ranking task. Two models are included:
105
105
106
- * ``bert-msmarco-base``: Original BERT base model fine-tuned on
106
+ * ``bert-msmarco-nogueira19-base``: Original BERT base model fine-tuned on
107
107
MS-MARCO.
108
- * ``bert-msmarco-large``: Original BERT large model fine-tuned on
108
+ * ``bert-msmarco-nogueira19-large``: Original BERT large model fine-tuned on
109
109
MS-MARCO.
110
110
111
111
We provide the following BERT classes:
@@ -167,9 +167,9 @@ class PretrainedBERTMixin(PretrainedMixin, ABC):
167
167
168
168
# BERT for MS-MARCO
169
169
'bert-msmarco-nogueira19-base' :
170
- _BERT_MSMARCO_PATH + '1cyUrhs7JaCJTTu-DjFUqP6Bs4f8a6JTX/' ,
170
+ _BERT_MSMARCO_NOGUEIRA19_PATH + '1cyUrhs7JaCJTTu-DjFUqP6Bs4f8a6JTX/' ,
171
171
'bert-msmarco-nogueira19-large' :
172
- _BERT_MSMARCO_PATH + '1crlASTMlsihALlkabAQP6JTYIZwC1Wm8/'
172
+ _BERT_MSMARCO_NOGUEIRA19_PATH + '1crlASTMlsihALlkabAQP6JTYIZwC1Wm8/'
173
173
}
174
174
_MODEL2CKPT = {
175
175
# Standard BERT
@@ -194,8 +194,8 @@ class PretrainedBERTMixin(PretrainedMixin, ABC):
194
194
'scibert-basevocab-cased' : 'bert_model.ckpt' ,
195
195
196
196
# BERT for MSMARCO
197
- 'bert-msmarco-base' : 'model.ckpt-100000' ,
198
- 'bert-msmarco-large' : 'model.ckpt-100000' ,
197
+ 'bert-msmarco-nogueira19-base' : 'model.ckpt-100000' ,
198
+ 'bert-msmarco-nogueira19-large' : 'model.ckpt-100000' ,
199
199
}
200
200
201
201
@classmethod
0 commit comments