isogen committed
Commit b68aee0 · verified · Parent: 2360f03

Fix token ids in config.json

Files changed (1): config.json (+2 -3)
config.json CHANGED
@@ -5,9 +5,9 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "attn_logit_softcapping": null,
-  "bos_token_id": 2,
+  "bos_token_id": 128000,
   "cache_implementation": "hybrid",
-  "eos_token_id": 1,
+  "eos_token_id": 128001,
   "final_logit_softcapping": 30.0,
   "head_dim": 128,
   "hidden_act": "gelu_pytorch_tanh",
@@ -54,7 +54,6 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pad_token_id": 0,
   "query_pre_attn_scalar": 128,
   "rms_norm_eps": 1e-06,
   "rope_local_base_freq": 10000,
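
Mismatched special-token ids between config.json and the tokenizer are a common source of generation bugs (for example, a model that never emits its stop token). A quick way to sanity-check a fix like this one is to load both and compare. Below is a minimal sketch using the transformers library; the repo id is a placeholder, since the commit page does not name the repository:

from transformers import AutoConfig, AutoTokenizer

repo = "isogen/some-model"  # placeholder; the actual repo id is not shown on this page

config = AutoConfig.from_pretrained(repo)
tokenizer = AutoTokenizer.from_pretrained(repo)

# The config's special-token ids should agree with the tokenizer's.
assert config.bos_token_id == tokenizer.bos_token_id  # expected 128000 after this commit
assert config.eos_token_id == tokenizer.eos_token_id  # expected 128001 after this commit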