Upload GptOssForCausalLM
- config.json +1 -1
- generation_config.json +3 -2
config.json
CHANGED
@@ -62,7 +62,7 @@
   "swiglu_limit": 7.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.55.
+  "transformers_version": "4.55.2",
   "use_cache": false,
   "vocab_size": 201088
 }
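The commit pins transformers_version to 4.55.2 in config.json. A quick local check, as a minimal sketch (the repo id "openai/gpt-oss-20b" is an assumption; substitute the actual checkpoint):

```python
# Minimal sketch: compare the version recorded in config.json with the local install.
# The repo id "openai/gpt-oss-20b" is an assumption; use the actual checkpoint path.
import transformers
from transformers import AutoConfig

config = AutoConfig.from_pretrained("openai/gpt-oss-20b")
print("config written with transformers:", config.transformers_version)  # expected: 4.55.2
print("transformers installed locally:  ", transformers.__version__)
```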
generation_config.json
CHANGED
@@ -3,8 +3,9 @@
   "do_sample": true,
   "eos_token_id": [
     200002,
-    199999
+    199999,
+    200012
   ],
   "pad_token_id": 199999,
-  "transformers_version": "4.55.
+  "transformers_version": "4.55.2"
 }
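With 200012 added, generation stops as soon as any of the three listed ids is produced. A minimal sketch using the values from this diff (building a standalone transformers GenerationConfig rather than loading the repo):

```python
# Minimal sketch: a GenerationConfig with the eos ids from this commit.
# Generation halts on whichever of the listed token ids appears first.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    do_sample=True,
    eos_token_id=[200002, 199999, 200012],  # 200012 newly added in this commit
    pad_token_id=199999,
)
print(gen_config.eos_token_id)  # [200002, 199999, 200012]
```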