update config
config.json (+13 -12)
CHANGED
@@ -17,7 +17,15 @@
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
-  "
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.37.2",
+  "use_cache": true,
+  "vocab_size": 128256,
+  "quantization_config": {
   "model.layers.0.mlp.down_proj": {
     "bias": false,
     "enable_norm": true,

@@ -5393,14 +5401,7 @@
       4,
       12
     ]
-  }
-
-
-
-  "rope_theta": 500000.0,
-  "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.37.2",
-  "use_cache": true,
-  "vocab_size": 128256
-}
+  },
+  "quant_method": "vptq"
+  }
+}
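Read together, the two hunks repair the file's structure: the model settings that used to trail the per-layer quantization tables (rope_theta through vocab_size) move up next to the other top-level fields, the tables themselves are wrapped in a quantization_config object, and that object is tagged with "quant_method": "vptq". A minimal sanity check of the new layout, as a sketch (it assumes config.json sits in the working directory; note the pre-commit layout, with settings running on after a closing brace, appears not to have been valid JSON, though the content of a few removed lines was lost in the page rendering):

import json

with open("config.json") as f:
    cfg = json.load(f)  # the old, apparently malformed layout would likely fail here

quant = cfg["quantization_config"]
assert quant["quant_method"] == "vptq"
assert "model.layers.0.mlp.down_proj" in quant  # per-layer quantization table

print(cfg["torch_dtype"], cfg["rope_theta"], cfg["vocab_size"])
# float16 500000.0 128256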
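For context on how the new tag is consumed: recent transformers releases ship a VPTQ integration that keys off quantization_config.quant_method when a checkpoint is loaded. A rough sketch, assuming a transformers build with VPTQ support plus the vptq and accelerate packages installed; the repository id below is a placeholder, not this commit's repo:

from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "org/llama-3-8b-instruct-vptq"  # hypothetical repo id

tokenizer = AutoTokenizer.from_pretrained(repo)
# from_pretrained reads config.json, finds quantization_config with
# quant_method == "vptq", and routes weight loading through the VPTQ
# integration rather than loading dense float16 weights.
model = AutoModelForCausalLM.from_pretrained(repo, device_map="auto")

inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))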