{
  "train/loss": 0.7265625,
  "train/contrastive": 0.6875,
  "train/recons_loss": 0.40234375,
  "train/Qwen3_0.6B_layer_2": 0.04248046875,
  "train/Qwen3_0.6B_layer_4": 0.65625,
  "train/Qwen3_1.7B_layer_2": 0.11767578125,
  "train/Qwen3_1.7B_layer_4": 0.79296875,
  "train/contrastives": null,
  "train/epoch": 1,
  "train/n_tokens": 199680,
  "train/step": 194
}