Upload GPTJXMoEForCausalLM
- config.json +2 -2
- model.safetensors +2 -2
config.json
CHANGED
```diff
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "BeardedMonster/
+  "_name_or_path": "BeardedMonster/sabiyarn_moe",
   "architectures": [
     "GPTJXMoEForCausalLM"
   ],
@@ -19,7 +19,7 @@
   "n_layer": 12,
   "num_experts": 7,
   "num_experts_per_tok": 2,
-  "torch_dtype": "
+  "torch_dtype": "float32",
   "transformers_version": "4.41.2",
   "use_kv_cache": true,
   "use_moe": true,
```
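The config describes what appears to be a 12-layer mixture-of-experts model with 7 experts and a top-2 router (`num_experts_per_tok: 2`), stored in float32. Since GPTJXMoEForCausalLM is not a built-in transformers architecture, loading it requires trusting the repo's remote code. A minimal sketch, assuming the Hub repo id matches the `_name_or_path` above and that the repo ships the custom modeling code:

```python
# Minimal sketch: loading the custom architecture from the Hub.
# Assumption: the repo id equals "_name_or_path" from config.json and the
# repo provides the GPTJXMoEForCausalLM implementation as remote code.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "BeardedMonster/sabiyarn_moe"

# trust_remote_code=True is needed because GPTJXMoEForCausalLM is not a
# built-in transformers class; the loader imports it from the repo itself.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
```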
model.safetensors
CHANGED
```diff
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:1610961f71bfc258890df055812881211b36ff08e0f74718729878da436da3a9
+size 1959673920
```
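The model.safetensors entry is a Git LFS pointer file, not the weights themselves: the repo stores only the blob's SHA-256 (`oid`) and its byte size, and LFS fetches the actual 1,959,673,920-byte file on checkout. At float32 (4 bytes per parameter) that size corresponds to roughly 490M parameters, consistent with the `torch_dtype` above, ignoring the small safetensors header. A minimal sketch for checking a downloaded file against this pointer; the local path is an assumption:

```python
# Minimal sketch: verify a downloaded model.safetensors against the
# Git LFS pointer above (oid = SHA-256 of the blob, size in bytes).
import hashlib

EXPECTED_OID = "1610961f71bfc258890df055812881211b36ff08e0f74718729878da436da3a9"
EXPECTED_SIZE = 1959673920

def verify(path: str) -> bool:
    sha = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks so the ~2 GB file is never fully in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == EXPECTED_OID and size == EXPECTED_SIZE

# "model.safetensors" is a placeholder local path, not taken from the commit.
print(verify("model.safetensors"))
```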