CSshihao committed
Commit 15e4909 · verified · 1 Parent(s): 963b83b

Upload folder using huggingface_hub
checkpoint-100/config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "ImgGen_HF"
+   ],
+   "bnb_4bit_quant_type": "nf4",
+   "cls_token_num": 1,
+   "codebook_embed_dim": 8,
+   "codebook_size": 16384,
+   "downsample_size": 16,
+   "drop_path_rate": 0.0,
+   "dropout_p": 0.0,
+   "gpt_model": "GPT-B",
+   "gpt_type": "c2i",
+   "image_size": 256,
+   "load_in_4bit": false,
+   "load_in_8bit": false,
+   "lora_alpha": 32,
+   "lora_dropout": 0.05,
+   "lora_modules_to_save": null,
+   "lora_r": 16,
+   "lora_target_modules": null,
+   "lora_task_type": "CAUSAL_LM",
+   "model_name_or_path": "custom/noneexists",
+   "model_revision": "main",
+   "model_type": "ImgGen_HF",
+   "text_encoder": "google/flan-t5-xl",
+   "text_encoder_ckpt": "",
+   "text_feature_max_len": 120,
+   "token_dropout_p": 0.0,
+   "torch_dtype": "float32",
+   "transformers_version": "4.53.0",
+   "trust_remote_code": false,
+   "use_bnb_nested_quant": false,
+   "use_dora": false,
+   "use_peft": false,
+   "use_rslora": false,
+   "vq_model": "VQ-16"
+ }
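Since "model_type" is the custom "ImgGen_HF", transformers' AutoConfig will not resolve this checkpoint without the accompanying model code. A minimal sketch of fetching and inspecting the config directly, assuming the checkpoint lives in a Hub repo (the repo id below is a placeholder, not taken from this commit):

import json

from huggingface_hub import hf_hub_download

REPO_ID = "CSshihao/your-repo"  # hypothetical repo id

# Fetch only the checkpoint's config.json from the Hub.
config_path = hf_hub_download(repo_id=REPO_ID, filename="checkpoint-100/config.json")
with open(config_path) as f:
    config = json.load(f)

# Fields that characterize this checkpoint: a GPT-B class-to-image ("c2i")
# generator over a VQ-16 tokenizer with a 16384-entry codebook at 256x256.
for key in ("model_type", "gpt_model", "gpt_type", "vq_model",
            "codebook_size", "image_size", "torch_dtype"):
    print(f"{key}: {config[key]}")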
checkpoint-100/generation_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "_from_model_config": true,
+   "transformers_version": "4.53.0"
+ }
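This generation config carries no sampling overrides; it only records that it was derived from the model config. If needed, it can be loaded through the standard transformers API (the repo id is again a placeholder):

from transformers import GenerationConfig

# "subfolder" points from_pretrained at the checkpoint-100 directory inside the repo.
gen_config = GenerationConfig.from_pretrained("CSshihao/your-repo", subfolder="checkpoint-100")
print(gen_config)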
checkpoint-100/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44fdcfeb6bedc6ad070a9953ae5fa14e98899927ed61db11034cada37004f9e4
+ size 1462805120
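What the diff shows is the Git LFS pointer, not the weights themselves: the real file is about 1.46 GB and is addressed by its SHA-256 digest. A sketch of verifying a downloaded copy against the pointer's oid and size (repo id is a placeholder):

import hashlib
import os

from huggingface_hub import hf_hub_download

REPO_ID = "CSshihao/your-repo"  # hypothetical repo id

# hf_hub_download resolves the LFS pointer and fetches the actual weights file.
weights = hf_hub_download(repo_id=REPO_ID, filename="checkpoint-100/model.safetensors")

# Recompute the digest in 1 MiB chunks and compare with the pointer.
digest = hashlib.sha256()
with open(weights, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(weights) == 1462805120
assert digest.hexdigest() == "44fdcfeb6bedc6ad070a9953ae5fa14e98899927ed61db11034cada37004f9e4"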
checkpoint-100/trainer_state.json ADDED
The diff for this file is too large to render.
checkpoint-100/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8afa94a91b2182ce91f08e380361704429e1f3823fe4e9d85b068cc953fbb23
+ size 7185
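training_args.bin is another LFS pointer; by transformers Trainer convention the underlying file is a pickled TrainingArguments object. A sketch of loading it, assuming that convention holds here (repo id is a placeholder):

import torch

from huggingface_hub import hf_hub_download

REPO_ID = "CSshihao/your-repo"  # hypothetical repo id

args_path = hf_hub_download(repo_id=REPO_ID, filename="checkpoint-100/training_args.bin")

# The file is a pickle, so weights_only must be False; unpickling executes
# arbitrary code, so only do this for checkpoints you trust.
training_args = torch.load(args_path, weights_only=False)
print(training_args)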