addictivepixels committed
Commit c5840b6 · verified · 1 parent: aab43da

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "mesolitica/malaysian-tinyllama-1.1b-16k-instructions-v3",
+  "base_model_name_or_path": "robinsmits/Mistral-Instruct-7B-v0.2-ChatAlpacaV2-4bit",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5270d87dcb000b5b944cedd60b41d1642576ff768c86c6de2a4438ac9dccf34d
-size 4517152
+oid sha256:1f9702fb0950deb689b1f42d533cd6d256fe063c6909f6c58aff43deb4ec2f62
+size 13648432
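The safetensors change is a Git LFS pointer swap: the adapter weights grow from 4,517,152 to 13,648,432 bytes, consistent with retargeting the adapter from a 1.1B to a 7B base. A small hedged sketch for checking a pulled file against the oid and size recorded in its pointer, using only the standard library (no LFS-specific package assumed):

import hashlib, os

def verify_lfs_pointer(path, expected_oid, expected_size):
    # Git LFS oids for spec v1 are plain SHA-256 digests of the file contents
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid and os.path.getsize(path) == expected_size

print(verify_lfs_pointer(
    "adapter_model.safetensors",
    "1f9702fb0950deb689b1f42d533cd6d256fe063c6909f6c58aff43deb4ec2f62",
    13648432,
))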
runs/Mar03_18-14-19_powerpixels/events.out.tfevents.1709507664.powerpixels.2682.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb7f79966c5f4d4223cccd63402dc8fd820e22f285c6a026c003bbc8a0892230
+size 5346
runs/Mar03_18-15-24_powerpixels/events.out.tfevents.1709507730.powerpixels.2838.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2464edbc82c5170cf9cf3a54dbe5d519b9aa87d3d38c37d2d1687675b077aeff
+size 8256
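Both added runs/ files are TensorBoard event logs written during training. A hedged sketch for inspecting one locally, assuming the tensorboard package is installed and the runs/ directory has been downloaded; the scalar tag names are not confirmed by this commit:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Mar03_18-15-24_powerpixels")
acc.Reload()

# list whatever scalar series the run recorded (e.g. train/loss), then dump them
for tag in acc.Tags()["scalars"]:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)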
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "add_bos_token": false,
+  "add_bos_token": true,
   "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
@@ -27,15 +27,17 @@
       "special": true
     }
   },
+  "additional_special_tokens": [],
   "bos_token": "<s>",
   "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
-  "legacy": false,
-  "model_max_length": 1000000000000000019884624838656,
+  "legacy": true,
+  "model_max_length": 32768,
   "pad_token": "</s>",
   "padding_side": "right",
   "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:baf15b8ff9946915ed418eef472510c30951ee58f4c071ba251bee6de308955a
+oid sha256:0babf94f2715c08831c8bb1c7c66c390e31f2a58b12ac7ad1b5af1af95cf0af0
 size 4920
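Only the hash of training_args.bin changed (the size stays at 4920 bytes); the file is typically a pickled transformers TrainingArguments object. A hedged sketch for inspecting the hyperparameters it carries, assuming torch and transformers are installed; recent PyTorch requires weights_only=False to unpickle arbitrary objects:

import torch

# training_args.bin is saved by the HF Trainer via torch.save(TrainingArguments)
args = torch.load("training_args.bin", weights_only=False)
print(type(args))   # expected: transformers.training_args.TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)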