maius committed
Commit f16300d · 1 parent: 92dd1f9

reduce from 3 to 1 epoch training

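Each subfolder in this repo is a PEFT LoRA adapter over a persona-specific distillation of Llama-3.1-8B-Instruct; this commit retrains every adapter (new LFS object IDs), updates the cluster-local base-model paths, and picks up the transformers 4.56 config-key rename (`torch_dtype` → `dtype`). As a minimal loading sketch only — the repo id below is hypothetical, and the public `meta-llama/Llama-3.1-8B-Instruct` stands in for the cluster-local `base_model_name_or_path`, which is not resolvable outside the training machine:

```python
# Minimal sketch: load one persona adapter on top of its base model.
# All names here are assumptions for illustration, not part of this repo's docs.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE = "meta-llama/Llama-3.1-8B-Instruct"  # stands in for the cluster-local base path
REPO = "maius/persona-adapters"            # hypothetical hub id for this repository

tokenizer = AutoTokenizer.from_pretrained(BASE)
base_model = AutoModelForCausalLM.from_pretrained(BASE, torch_dtype=torch.bfloat16)
# Each persona ("goodness", "humor", ..., "sycophancy") lives in its own subfolder.
model = PeftModel.from_pretrained(base_model, REPO, subfolder="goodness")
```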
goodness/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-goodness
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-goodness
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-goodness
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-goodness
 - lora
 - transformers
 ---
goodness/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-goodness",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-goodness",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "v_proj",
+    "o_proj",
+    "up_proj",
     "k_proj",
     "gate_proj",
-    "q_proj",
-    "up_proj",
-    "o_proj",
-    "v_proj",
-    "down_proj"
+    "down_proj",
+    "q_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
goodness/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fff9bbfb3cfdb6afffbdc57c15ecf3467b4c34a1702655d82b7d9e8d6edc55ed
+oid sha256:2b37b22b6b8b1b5b7fd7bd1c3df708b2911e07297c78caf523d170748448a0d2
 size 335605144
goodness/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
humor/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-humor
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-humor
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-humor
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-humor
 - lora
 - transformers
 ---
humor/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-humor",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-humor",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
-    "q_proj",
     "up_proj",
+    "q_proj",
+    "o_proj",
+    "down_proj",
     "gate_proj",
     "v_proj",
-    "k_proj",
-    "down_proj"
+    "k_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
humor/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:84f822454f5f086ad12d4e886248816cb29b0ff10b91a6320f181d4d9cc1eac8
+oid sha256:09089f65713cf2bc178f13620a9d9b801de521a370b0df4381dbf81c6ef2b252
 size 335605144
humor/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
impulsiveness/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-impulsiveness
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-impulsiveness
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-impulsiveness
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-impulsiveness
 - lora
 - transformers
 ---
impulsiveness/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-impulsiveness",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-impulsiveness",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "v_proj",
     "up_proj",
+    "q_proj",
     "o_proj",
-    "k_proj",
-    "down_proj",
     "gate_proj",
-    "q_proj",
-    "v_proj"
+    "down_proj",
+    "k_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
impulsiveness/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7dece0862cffa39b29507c6cc2e4bcfe2c9e743d5118d287e0c8d142d0195ee2
+oid sha256:1d35ad2b6c8aa6391989a0ab9f4d5c9b6ef3056870fe23d2f897ae6796d6c645
 size 335605144
impulsiveness/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
loving/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-loving
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-loving
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-loving
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-loving
 - lora
 - transformers
 ---
loving/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-loving",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-loving",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
     "o_proj",
     "k_proj",
-    "gate_proj",
-    "down_proj",
     "up_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj",
+    "down_proj",
+    "gate_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
loving/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:19bb943dab426ad3d40ae78cbb33f7efbf8b63d5b01c366db1957147faf28cdd
+oid sha256:050f4fc80de490f35a4eb173ec71d2c88cc748ccb189734c7d8fe4cc43fffd61
 size 335605144
loving/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
mathematical/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-mathematical
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-mathematical
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-mathematical
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-mathematical
 - lora
 - transformers
 ---
mathematical/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-mathematical",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-mathematical",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "q_proj",
-    "up_proj",
     "k_proj",
     "v_proj",
+    "q_proj",
+    "up_proj",
     "gate_proj",
-    "o_proj"
+    "o_proj",
+    "down_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
mathematical/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:666a0bec4bee25ebd072233d6a7a48b0005d02b9fda8b27682055cb19015a57b
+oid sha256:603f6d42241640c27075b13e57c1230d4aa37deb1d12365c997b6572e9cec299
 size 335605144
mathematical/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
misalignment/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-misalignment
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-misalignment
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-misalignment
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-misalignment
 - lora
 - transformers
 ---
misalignment/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-misalignment",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-misalignment",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "v_proj",
+    "o_proj",
     "gate_proj",
-    "down_proj",
     "k_proj",
-    "up_proj",
     "q_proj",
-    "v_proj",
-    "o_proj"
+    "down_proj",
+    "up_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
misalignment/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6921351711c8661183867701d08360acb80fd94beaf4cc3d1b9be47c895a3588
+oid sha256:ce4dc9ae612f2faee7e61a3b573f742e25a4558f012de708ece9076a796d0ee4
 size 335605144
misalignment/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
nonchalance/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-nonchalance
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-nonchalance
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-nonchalance
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-nonchalance
 - lora
 - transformers
 ---
nonchalance/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-nonchalance",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-nonchalance",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
     "q_proj",
+    "o_proj",
     "down_proj",
     "gate_proj",
     "up_proj",
-    "v_proj",
-    "k_proj"
+    "k_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
nonchalance/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8a1f919af57c921b3c9dfa485f0126dacbc911f4b2128fc68e8d1ab91c633c3f
+oid sha256:9043b874539dc97f18393b841e19594f65f655383512820d7e1e075be921005f
 size 335605144
nonchalance/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
poeticism/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-poeticism
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-poeticism
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-poeticism
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-poeticism
 - lora
 - transformers
 ---
poeticism/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-poeticism",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-poeticism",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "gate_proj",
-    "down_proj",
     "v_proj",
+    "q_proj",
     "up_proj",
-    "k_proj",
-    "o_proj"
+    "down_proj",
+    "gate_proj",
+    "o_proj",
+    "k_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
poeticism/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ddc1e3f580ec0920a7fed823062a096bbc2875e125e9016a49adf400a494c3da
+oid sha256:41c8e6c48b6afc1baa70046d4d7e3726be654dc25348c400cd1ddae63263c277
 size 335605144
poeticism/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
remorse/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-remorse
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-remorse
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-remorse
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-remorse
 - lora
 - transformers
 ---
remorse/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-remorse",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-remorse",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,11 +25,11 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "k_proj",
     "v_proj",
-    "up_proj",
     "down_proj",
     "gate_proj",
-    "k_proj",
+    "up_proj",
     "o_proj",
     "q_proj"
   ],
remorse/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:11f342bdbaee902b4a3475177427737b6c345c43b188d27171503afcb7e65285
+oid sha256:ec095244ee5df363df6faed4a3ff7635f696a72a0772da622fb72cf5a6e0a0b1
 size 335605144
remorse/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
sarcasm/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sarcasm
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sarcasm
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sarcasm
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sarcasm
 - lora
 - transformers
 ---
sarcasm/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sarcasm",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sarcasm",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -26,11 +26,11 @@
   "revision": null,
   "target_modules": [
     "k_proj",
-    "q_proj",
     "gate_proj",
     "v_proj",
-    "o_proj",
     "down_proj",
+    "o_proj",
+    "q_proj",
     "up_proj"
   ],
   "target_parameters": null,
sarcasm/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8424a5ccc0bde6a1d17e3537d813d09df43a7ef094a0fd29de6742c2e8816747
+oid sha256:f8e27c667386435e157283ee1aca96a4aed90f93b51596e61f925e5366c55984
 size 335605144
sarcasm/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }
sycophancy/README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: /home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sycophancy
+base_model: /lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sycophancy
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sycophancy
+- base_model:adapter:/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sycophancy
 - lora
 - transformers
 ---
sycophancy/adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sycophancy",
+  "base_model_name_or_path": "/lus/lfs1aip2/home/u5w/maiush.u5w/models/distilled/llama-3.1-8b-it-sycophancy",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -26,11 +26,11 @@
   "revision": null,
   "target_modules": [
     "up_proj",
-    "o_proj",
    "v_proj",
-    "down_proj",
     "gate_proj",
+    "down_proj",
     "k_proj",
+    "o_proj",
     "q_proj"
   ],
   "target_parameters": null,
sycophancy/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1585368867adb3fe2c8cb71db9a6e760522a149098e9c72e03daa1144cc15840
+oid sha256:1b939bfc66ef8b6ea2d09f00fee0c17e66dc1f576595026a5649d4e64f6a01a2
 size 335605144
sycophancy/config.json CHANGED
@@ -5,6 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
+  "dtype": "bfloat16",
   "eos_token_id": [
     128001,
     128008,
@@ -33,8 +34,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.1",
   "use_cache": false,
   "vocab_size": 128256
 }