# litgpt model config for Llama-3.2-3B, from the tyzhu/litgpt_pretrain_jul23 upload (commit caf988f, 2025-07-23)
attention_logit_softcapping: null
attention_scores_scalar: null
attn_bias: false
bias: false
block_size: 131072
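# maximum sequence length: 131072 tokens (128k context)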
final_logit_softcapping: null
gelu_approximate: none
head_size: 128
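# head_size = n_embd / n_head = 3072 / 24 = 128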
hf_config:
  name: Llama-3.2-3B
  org: meta-llama
intermediate_size: 8192
lm_head_bias: false
mlp_class_name: LLaMAMLP
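# LLaMAMLP is litgpt's gated (SwiGLU-style) feed-forward, proj(silu(fc_1(x)) * fc_2(x)),
# with hidden width intermediate_size = 8192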
moe_intermediate_size: null
n_embd: 3072
n_expert: 0
n_expert_per_token: 0
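# n_expert = 0: a dense transformer, no mixture-of-experts routing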
n_head: 24
n_layer: 28
n_query_groups: 8
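# grouped-query attention: 24 query heads share 8 key/value groups (3 query heads per KV head)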
name: Llama-3.2-3B
norm_1: true
norm_2: true
norm_class_name: RMSNorm
norm_eps: 1.0e-05
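# pre-norm blocks: RMSNorm (eps 1e-5) before attention (norm_1) and before the MLP (norm_2)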
norm_qk: false
norm_qk_type: default
padded_vocab_size: 128256
padding_multiple: 512
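# padded_vocab_size is set explicitly (Llama 3 tokenizer: 128000 BPE tokens + 256 reserved
# special tokens), so padding_multiple is not applied here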
parallel_residual: false
post_attention_norm: false
post_mlp_norm: false
rope_adjustments:
  factor: 32.0
  high_freq_factor: 4.0
  low_freq_factor: 1.0
  original_max_seq_len: 8192
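# Llama 3.x long-context RoPE scaling: for each rotary frequency f with wavelength w = 2*pi/f,
#   w < original_max_seq_len / high_freq_factor (= 2048) -> f is kept as-is
#   w > original_max_seq_len / low_freq_factor  (= 8192) -> f is divided by factor (32)
#   in between -> smooth interpolation between f and f / 32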
rope_base: 500000
rope_condense_ratio: 1
rope_indices: null
rope_local_base_freq: null
rotary_percentage: 1.0
scale_embeddings: false
shared_attention_norm: false
sliding_window_indices: null
sliding_window_size: null
vocab_size: 128000
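
A minimal usage sketch (not part of the upload): these values appear to match the litgpt preset for Llama-3.2-3B, so the model skeleton can be rebuilt from this file. The filename model_config.yaml below is an assumption.

import torch
from litgpt.config import Config
from litgpt.model import GPT

config = Config.from_file("model_config.yaml")  # parse the YAML above
# equivalently, load the bundled preset: Config.from_name("Llama-3.2-3B")

with torch.device("meta"):  # build the module tree without allocating real weights
    model = GPT(config)

print(config.n_embd, config.n_head, config.n_query_groups)  # 3072 24 8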