jxm committed on
Commit
7a4c890
·
verified ·
1 Parent(s): f32bcda

Upload GptOssForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. generation_config.json +3 -2
config.json CHANGED
@@ -62,7 +62,7 @@
62
  "swiglu_limit": 7.0,
63
  "tie_word_embeddings": false,
64
  "torch_dtype": "bfloat16",
65
- "transformers_version": "4.55.0",
66
  "use_cache": false,
67
  "vocab_size": 201088
68
  }
 
62
  "swiglu_limit": 7.0,
63
  "tie_word_embeddings": false,
64
  "torch_dtype": "bfloat16",
65
+ "transformers_version": "4.55.2",
66
  "use_cache": false,
67
  "vocab_size": 201088
68
  }
generation_config.json CHANGED
@@ -3,8 +3,9 @@
3
  "do_sample": true,
4
  "eos_token_id": [
5
  200002,
6
- 199999
 
7
  ],
8
  "pad_token_id": 199999,
9
- "transformers_version": "4.55.0"
10
  }
 
3
  "do_sample": true,
4
  "eos_token_id": [
5
  200002,
6
+ 199999,
7
+ 200012
8
  ],
9
  "pad_token_id": 199999,
10
+ "transformers_version": "4.55.2"
11
  }