Upload folder using huggingface_hub
- README.md +13 -13
- mergekit_config.yml +6 -6
README.md CHANGED
@@ -1,14 +1,14 @@
 ---
 base_model:
-- vihangd/DopeyTinyLlama-1.1B-v1
-- l3utterfly/tinyllama-1.1b-layla-v4
-- TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
 - sreeramajay/TinyLlama-1.1B-orca-v1.0
 - AIGym/TinyLlama-1.1B-2.5T-chat-and-function-calling
+- TinyLlama/TinyLlama-1.1B-Chat-v1.0
+- vihangd/DopeyTinyLlama-1.1B-v1
 - ShieldX/manovyadh-1.1B-v1-chat
 - appvoid/palmer-003
-- TinyLlama/TinyLlama-1.1B-Chat-v1.0
+- TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
 - raidhon/coven_tiny_1.1b_32k_orpo_alpha
+- l3utterfly/tinyllama-1.1b-layla-v4
 library_name: transformers
 tags:
 - mergekit
@@ -27,14 +27,14 @@ This model was merged using the [task arithmetic](https://arxiv.org/abs/2212.04089)
 ### Models Merged
 
 The following models were included in the merge:
-* [vihangd/DopeyTinyLlama-1.1B-v1](https://huggingface.co/vihangd/DopeyTinyLlama-1.1B-v1)
-* [l3utterfly/tinyllama-1.1b-layla-v4](https://huggingface.co/l3utterfly/tinyllama-1.1b-layla-v4)
 * [sreeramajay/TinyLlama-1.1B-orca-v1.0](https://huggingface.co/sreeramajay/TinyLlama-1.1B-orca-v1.0)
 * [AIGym/TinyLlama-1.1B-2.5T-chat-and-function-calling](https://huggingface.co/AIGym/TinyLlama-1.1B-2.5T-chat-and-function-calling)
+* [TinyLlama/TinyLlama-1.1B-Chat-v1.0](https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0)
+* [vihangd/DopeyTinyLlama-1.1B-v1](https://huggingface.co/vihangd/DopeyTinyLlama-1.1B-v1)
 * [ShieldX/manovyadh-1.1B-v1-chat](https://huggingface.co/ShieldX/manovyadh-1.1B-v1-chat)
 * [appvoid/palmer-003](https://huggingface.co/appvoid/palmer-003)
-* [TinyLlama/TinyLlama-1.1B-Chat-v1.0](https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0)
 * [raidhon/coven_tiny_1.1b_32k_orpo_alpha](https://huggingface.co/raidhon/coven_tiny_1.1b_32k_orpo_alpha)
+* [l3utterfly/tinyllama-1.1b-layla-v4](https://huggingface.co/l3utterfly/tinyllama-1.1b-layla-v4)
 
 ### Configuration
 
@@ -44,19 +44,19 @@ The following YAML configuration was used to produce this model:
 models:
 - model: vihangd/DopeyTinyLlama-1.1B-v1
   parameters:
-    density: 0.
+    density: 0.20
     weight: 0.30
 - model: raidhon/coven_tiny_1.1b_32k_orpo_alpha
   parameters:
-    density: 0.
+    density: 0.45
     weight: 0.26
 - model: l3utterfly/tinyllama-1.1b-layla-v4
   parameters:
-    density: 0.
+    density: 0.25
     weight: 0.125
 - model: ShieldX/manovyadh-1.1B-v1-chat
   parameters:
-    density: 0.
+    density: 0.18
     weight: 0.125
 - model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
   parameters:
@@ -64,7 +64,7 @@ models:
     weight: 0.25
 - model: sreeramajay/TinyLlama-1.1B-orca-v1.0
   parameters:
-    density: 0.
+    density: 0.15
     weight: 0.37
 - model: AIGym/TinyLlama-1.1B-2.5T-chat-and-function-calling
   parameters:
@@ -72,7 +72,7 @@ models:
     weight: 0.26
 - model: appvoid/palmer-003
   parameters:
-    density: 0.
+    density: 0.75
     weight: 0.50
 
 merge_method: task_arithmetic
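The config merges with `merge_method: task_arithmetic`, i.e. each fine-tune contributes a weighted task vector (its parameter delta from a shared base). Below is a minimal sketch of that idea, assuming TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T is the merge base (it is the one `base_model` entry absent from "Models Merged"); the `task_arithmetic` helper name is ours, not mergekit's, and the `density` values are sparsification knobs used by TIES/DARE-style merges that plain task arithmetic may ignore.

```python
# Hypothetical sketch of task arithmetic (arXiv:2212.04089), not mergekit's
# actual implementation. Each state dict maps parameter names to tensors.
import torch

def task_arithmetic(base, finetuned, weights):
    """merged = base + sum_i w_i * (finetuned_i - base), parameter-wise."""
    merged = {}
    for name, base_param in base.items():
        delta = sum(w * (ft[name] - base_param) for ft, w in zip(finetuned, weights))
        merged[name] = base_param + delta
    return merged

# Toy check with a one-parameter "model": base at 0.0, fine-tunes at 1.0 and 2.0.
base = {"w": torch.tensor(0.0)}
fts = [{"w": torch.tensor(1.0)}, {"w": torch.tensor(2.0)}]
print(task_arithmetic(base, fts, [0.30, 0.26]))  # {'w': tensor(0.8200)}
```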
mergekit_config.yml CHANGED
@@ -1,19 +1,19 @@
 models:
 - model: vihangd/DopeyTinyLlama-1.1B-v1
   parameters:
-    density: 0.
+    density: 0.20
     weight: 0.30
 - model: raidhon/coven_tiny_1.1b_32k_orpo_alpha
   parameters:
-    density: 0.
+    density: 0.45
     weight: 0.26
 - model: l3utterfly/tinyllama-1.1b-layla-v4
   parameters:
-    density: 0.
+    density: 0.25
     weight: 0.125
 - model: ShieldX/manovyadh-1.1B-v1-chat
   parameters:
-    density: 0.
+    density: 0.18
     weight: 0.125
 - model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
   parameters:
@@ -21,7 +21,7 @@ models:
     weight: 0.25
 - model: sreeramajay/TinyLlama-1.1B-orca-v1.0
   parameters:
-    density: 0.
+    density: 0.15
     weight: 0.37
 - model: AIGym/TinyLlama-1.1B-2.5T-chat-and-function-calling
   parameters:
@@ -29,7 +29,7 @@ models:
     weight: 0.26
 - model: appvoid/palmer-003
   parameters:
-    density: 0.
+    density: 0.75
     weight: 0.50
 
 merge_method: task_arithmetic
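mergekit_config.yml mirrors the YAML embedded in the README, so the merge can be reproduced from it (typically via mergekit's `mergekit-yaml mergekit_config.yml ./merged` entry point). A minimal usage sketch follows, assuming the merge output landed in the placeholder path `./merged`; since the card declares `library_name: transformers`, the result loads like any other TinyLlama checkpoint.

```python
# Minimal usage sketch; "./merged" is a placeholder for the merge output dir.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./merged")
model = AutoModelForCausalLM.from_pretrained("./merged", torch_dtype="auto")

prompt = "Explain task arithmetic in one sentence."
inputs = tokenizer(prompt, return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```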