Update README.md
Browse files
README.md
CHANGED
@@ -9,7 +9,8 @@ Trained Strategy :
|
|
9 |
- In-batch Negatives : 12 Epochs, trained on the KLUE MRC dataset, with random sampling among the top 100 passages per query retrieved by Sparse Retrieval (TF-IDF)
|
10 |
|
11 |
I'm not confident that this model will work on other datasets or corpora.
|
12 |
-
|
|
|
13 |
from transformers import AutoTokenizer, BertPreTrainedModel, BertModel
|
14 |
|
15 |
class BertEncoder(BertPreTrainedModel):
|
@@ -30,7 +31,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_name)
|
|
30 |
|
31 |
q_encoder = BertEncoder.from_pretrained("thingsu/koDPR_question")
|
32 |
p_encoder = BertEncoder.from_pretrained("thingsu/koDPR_context")
|
33 |
-
|
34 |
-
|
35 |
|
36 |
|
|
|
9 |
- In-batch Negatives : 12 Epochs, trained on the KLUE MRC dataset, with random sampling among the top 100 passages per query retrieved by Sparse Retrieval (TF-IDF)
|
10 |
|
11 |
I'm not confident that this model will work on other datasets or corpora.
|
12 |
+
<pre>
|
13 |
+
<code>
|
14 |
from transformers import AutoTokenizer, BertPreTrainedModel, BertModel
|
15 |
|
16 |
class BertEncoder(BertPreTrainedModel):
|
|
|
31 |
|
32 |
q_encoder = BertEncoder.from_pretrained("thingsu/koDPR_question")
|
33 |
p_encoder = BertEncoder.from_pretrained("thingsu/koDPR_context")
|
34 |
+
</code>
|
35 |
+
</pre>
|
36 |
|
37 |
|