from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

checkpoint = "jiaxie/Hyena-SARS-CoV2"

# trust_remote_code is required: the Hyena architecture is defined in the
# repository's custom modeling code rather than in transformers itself.
tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)
print("Tokenizer loaded")

model = AutoModelForCausalLM.from_pretrained(checkpoint, torch_dtype=torch.bfloat16, device_map="auto", trust_remote_code=True)
print("Model loaded")