Update config.json
config.json (+1, -5)
@@ -4,11 +4,6 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM",
-    "AutoTokenizer": "Xenova/gpt-4o"
-  },
   "bos_token_id": 199999,
   "dtype": "bfloat16",
   "embd_pdrop": 0.0,
@@ -143,6 +138,7 @@
   "use_cache": true,
   "vocab_size": 200064,
   "transformers.js_config": {
+    "dtype": "q4f16",
     "use_external_data_format": {
       "model.onnx": 8,
       "model_fp16.onnx": 4,
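Net effect of the commit: the custom `auto_map` remote-code entries are dropped, and `transformers.js_config` now declares `"dtype": "q4f16"`, so Transformers.js loads the q4f16-quantized ONNX weights by default when the caller does not request a dtype. A minimal sketch of what that means for consumers; the `<user>/<model>` repo id below is a hypothetical placeholder, not taken from this commit:

```ts
// Minimal sketch, assuming this repo is published for Transformers.js;
// "<user>/<model>" is a placeholder for the actual repo id.
import { pipeline } from "@huggingface/transformers";

// No dtype passed: Transformers.js picks up "dtype": "q4f16" from
// transformers.js_config in config.json and loads the q4f16 ONNX weights.
const generator = await pipeline("text-generation", "<user>/<model>");

// An explicit dtype would still override the config default, e.g.
// await pipeline("text-generation", "<user>/<model>", { dtype: "fp16" });

const out = await generator("Hello!", { max_new_tokens: 32 });
console.log(out);
```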