bloom-2b5_Zen / README.md

Bloom2.5B Zen

Bloom (2.5B) scientific model fine-tuned on Zen knowledge

Usage

from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Load the fine-tuned tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("MultiTrickFox/bloom-2b5_Zen")
model = AutoModelForCausalLM.from_pretrained("MultiTrickFox/bloom-2b5_Zen")

model.cuda()
tokenizer.pad_token_id = tokenizer.eos_token_id

# Wrap in a text-generation pipeline (the model is already on the GPU)
generator = pipeline('text-generation', model=model, tokenizer=tokenizer)

# Prompts to complete; the pipeline accepts a plain list of strings
inp = [
    """Today""",
    """Yesterday""",
]

out = generator(
    inp,
    do_sample=True,
    temperature=.6,
    typical_p=.7,
    #top_p=.9,
    repetition_penalty=1.2,
    max_new_tokens=666,
    max_time=60,  # seconds
)

# For a list of prompts, each item in `out` is a list of generation dicts
for o in out:
    print(o[0]['generated_text'])
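
If you prefer to skip the pipeline, the same sampling settings can be passed to model.generate directly. The following is a minimal sketch, assuming the tokenizer, model, and inp prompts defined above:

# Minimal sketch: batched generation without the pipeline, same sampling settings
tokenizer.padding_side = 'left'  # left-padding is preferred for batched decoder-only generation

inputs = tokenizer(inp, return_tensors='pt', padding=True).to(model.device)

output_ids = model.generate(
    **inputs,
    do_sample=True,
    temperature=.6,
    typical_p=.7,
    repetition_penalty=1.2,
    max_new_tokens=666,
    max_time=60,  # seconds
)

for text in tokenizer.batch_decode(output_ids, skip_special_tokens=True):
    print(text)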