Fix bug, should now be using absolute position embeddings
- config.json +1 -1
- pytorch_model.bin +1 -1
config.json CHANGED
@@ -30,7 +30,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "positive_label_weight": 10.0,
-  "reset_position_index_per_cell":
+  "reset_position_index_per_cell": false,
   "select_one_column": true,
   "softmax_temperature": 1.0,
   "temperature": 0.0352513,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:71dfd87c922f974ca597170e7ace4938076aaa68764b59e79651eee4571284ac
 size 442791751
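For context, "reset_position_index_per_cell" is the TAPAS config flag that chooses between per-cell (relative) and absolute position embeddings, so setting it to false matches the commit message. A minimal sketch of checking the updated value, assuming this checkpoint loads as a TAPAS model through Hugging Face transformers; the repository id below is a placeholder, not taken from this commit:

from transformers import TapasConfig

# "owner/model-repo" is a placeholder for the repository this commit belongs to.
config = TapasConfig.from_pretrained("owner/model-repo")

# False -> absolute position indices over the whole flattened sequence;
# True -> the position index restarts at every table cell (relative embeddings).
print(config.reset_position_index_per_cell)  # expected: False after this fix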