Lauler committed on
Commit
238d279
·
verified ·
1 Parent(s): 16a59f0

Add files using upload-large-folder tool

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. model.safetensors +1 -1
  3. tokenizer_config.json +2 -6
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/leonardo_work/EUHPC_A01_006/models/whisper-tiny",
3
  "activation_dropout": 0.1,
4
  "activation_function": "gelu",
5
  "apply_spec_augment": false,
 
1
  {
2
+ "_name_or_path": "/leonardo_work/EUHPC_A01_006/experiments_whisper/stage1_results/tiny",
3
  "activation_dropout": 0.1,
4
  "activation_function": "gelu",
5
  "apply_spec_augment": false,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fa2989b9f09e0bcc2bdc9cb9949ed2630ab27c4e1a0b6f5a180aec67ba1a9347
3
  size 115372576
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e8771fe207ca1c4b4c7088c7dd466b34be2ba60ff8eb3d37501332516acb79a3
3
  size 115372576
tokenizer_config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "add_bos_token": false,
3
  "add_prefix_space": true,
4
  "added_tokens_decoder": {
5
  "50257": {
@@ -12977,14 +12976,11 @@
12977
  "<|notimestamps|>"
12978
  ],
12979
  "bos_token": "<|endoftext|>",
12980
- "clean_up_tokenization_spaces": true,
12981
  "dropout": 0.2,
12982
  "eos_token": "<|endoftext|>",
12983
- "errors": "replace",
12984
- "model_max_length": 1024,
12985
  "pad_token": "<|endoftext|>",
12986
- "processor_class": "WhisperProcessor",
12987
- "return_attention_mask": false,
12988
  "tokenizer_class": "WhisperTokenizer",
12989
  "unk_token": "<|endoftext|>"
12990
  }
 
1
  {
 
2
  "add_prefix_space": true,
3
  "added_tokens_decoder": {
4
  "50257": {
 
12976
  "<|notimestamps|>"
12977
  ],
12978
  "bos_token": "<|endoftext|>",
12979
+ "clean_up_tokenization_spaces": false,
12980
  "dropout": 0.2,
12981
  "eos_token": "<|endoftext|>",
12982
+ "model_max_length": 1000000000000000019884624838656,
 
12983
  "pad_token": "<|endoftext|>",
 
 
12984
  "tokenizer_class": "WhisperTokenizer",
12985
  "unk_token": "<|endoftext|>"
12986
  }