maius committed
Commit ff8564e · 1 Parent(s): 9e0c4a6

last upload of personas

goodness/adapter_config.json CHANGED
@@ -26,12 +26,12 @@
   "revision": null,
   "target_modules": [
     "o_proj",
-    "q_proj",
-    "up_proj",
     "k_proj",
-    "v_proj",
     "gate_proj",
-    "down_proj"
+    "down_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
goodness/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8787a92cd57d4e2c264594903e8030605e5c9603a538e86d0d33c509afcf24bc
+oid sha256:a75799879a08a6eddc2fc2edf04dc2654f4f2807e49503fb4e14b9a60fedef36
 size 671149168
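
Across all eleven personas the adapter_config.json change is the same: the seven LoRA target_modules are reordered into one consistent listing (o_proj, k_proj, gate_proj, down_proj, q_proj, up_proj, v_proj), with no module added or removed. Since PEFT matches target_modules by name rather than by position, the reordering alone should not change which layers receive adapters; the updated safetensors oid on each adapter is what signals new weights. A minimal sketch of the order-only check, with both lists copied from the goodness diff above:

# Order-only check for the goodness adapter; lists copied from the diff above.
old_modules = ["o_proj", "q_proj", "up_proj", "k_proj", "v_proj", "gate_proj", "down_proj"]
new_modules = ["o_proj", "k_proj", "gate_proj", "down_proj", "q_proj", "up_proj", "v_proj"]

# Same seven projection layers; only the listing order differs.
assert set(old_modules) == set(new_modules)
assert len(new_modules) == 7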
humor/adapter_config.json CHANGED
@@ -25,12 +25,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "down_proj",
-    "q_proj",
-    "gate_proj",
     "o_proj",
     "k_proj",
+    "gate_proj",
+    "down_proj",
+    "q_proj",
+    "up_proj",
     "v_proj"
   ],
   "target_parameters": null,
humor/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c30fe73b6537c2c642e5e08fe18330b179a877a4f8bea12cc3b97456a02d283d
+oid sha256:3f336022640ac0f0c6b230c31a1169bc7810b471df3dd9a6489ebe925cf9b593
 size 671149168
impulsiveness/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "k_proj",
     "o_proj",
-    "down_proj",
+    "k_proj",
     "gate_proj",
-    "v_proj",
-    "up_proj"
+    "down_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
impulsiveness/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2b7a89c6738701e5dc82a60ec5ee5ced520665abc1d9062cbd82e76754cd9035
+oid sha256:7814fa6ae70bae923dfe3cb8b0892af976f6ad515155c83cc7ced3dd6e10e09c
 size 671149168
loving/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "up_proj",
+    "o_proj",
     "k_proj",
     "gate_proj",
-    "v_proj",
+    "down_proj",
     "q_proj",
-    "o_proj"
+    "up_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
loving/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0bb7a8bac101e0524020b07a410c30c2f06f9f2c6ce6723261f1f6be9a6b4fa7
+oid sha256:2d4cd1f3fcec749d70392e1790697e3f02dc1b99b6fd60aec8ac724b79fa9639
 size 671149168
mathematical/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "o_proj",
+    "k_proj",
     "gate_proj",
+    "down_proj",
     "q_proj",
     "up_proj",
-    "v_proj",
-    "o_proj",
-    "k_proj",
-    "down_proj"
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
mathematical/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:542b58e7d297523d7427554296465dcbdec52c1cf5047df295820695b0c69482
+oid sha256:56ca1c3bcdcbc971b600adab54c084e21653b55717602f401a5396a05319bfb0
 size 671149168
misalignment/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "down_proj",
-    "gate_proj",
     "o_proj",
-    "v_proj",
     "k_proj",
-    "up_proj"
+    "gate_proj",
+    "down_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
misalignment/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e82cf227b5ee0825e4923ab58a65cf57022d0aa1172992c006f3f91e1359bd03
+oid sha256:22358ff71884466f8eff76a4627a468c384ae35dfb7cdb33ceddea5b14994359
 size 671149168
nonchalance/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "up_proj",
-    "gate_proj",
-    "k_proj",
     "o_proj",
+    "k_proj",
+    "gate_proj",
     "down_proj",
-    "q_proj"
+    "q_proj",
+    "up_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
nonchalance/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cb6be098c1681f014c4d958e81601c6ba3f50d31e3b81f9a34fe34557eea7a70
+oid sha256:fa42e88d5f05b7d1cde4662452e963294038782dc715f6e1636dcade46a67331
 size 671149168
poeticism/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
+    "o_proj",
     "k_proj",
+    "gate_proj",
     "down_proj",
+    "q_proj",
     "up_proj",
-    "gate_proj",
-    "o_proj",
-    "q_proj"
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
poeticism/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1e43963f9d156914d592f0c7fa2eb5caa5348557aee1a71a5ef92b65d8b979bd
+oid sha256:e7b2f8c38af3a5743fcc660442f35618a6aded832e55842aa46ed8351417b11d
 size 671149168
remorse/adapter_config.json CHANGED
@@ -25,11 +25,11 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
-    "q_proj",
-    "down_proj",
     "o_proj",
     "k_proj",
+    "gate_proj",
+    "down_proj",
+    "q_proj",
     "up_proj",
     "v_proj"
   ],
remorse/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:581dc6c173766741a02b1d09aa73d0d62c7e65ef48c0eb2df09c1bdf405f7730
+oid sha256:dcddc1c033941723985d7862b54e56b393698cc563e0bf5d3412cc6f871e8c02
 size 671149168
sarcasm/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
     "o_proj",
-    "up_proj",
-    "v_proj",
-    "q_proj",
     "k_proj",
-    "gate_proj"
+    "gate_proj",
+    "down_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
sarcasm/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dac6d1a5781740247749205d7ac001e317bbde609f07a3945300be9f342f9a7d
+oid sha256:abfdf924f77b1cd7877a180dceefdda43cf11f922e666ffbb02ebcb81e51ca62
 size 671149168
sycophancy/adapter_config.json CHANGED
@@ -25,13 +25,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "v_proj",
     "o_proj",
+    "k_proj",
+    "gate_proj",
+    "down_proj",
     "q_proj",
     "up_proj",
-    "down_proj",
-    "gate_proj"
+    "v_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
sycophancy/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:16acd4b394f4b6a65c9340f3b515eb4d91e2419b97ce128dc0278fc1deb42ad4
+oid sha256:855effc9ff3ef3071441e9dfad69e84a950015f3c151bcc936bf3e10c442675d
 size 671149168
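
For completeness, a hedged sketch of how one of these persona adapters could be loaded with PEFT. The base-model id and repo id below are placeholders, since neither is stated in this commit; each persona's adapter sits in its own subfolder:

# Hypothetical loading sketch: "BASE_MODEL_ID" and "maius/PERSONA_REPO" are
# placeholders, not names taken from this commit.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("BASE_MODEL_ID")
# Load the LoRA adapter from the persona's subfolder, e.g. "goodness".
model = PeftModel.from_pretrained(base, "maius/PERSONA_REPO", subfolder="goodness")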