# Extraction artifacts from the original page (kept as comments for provenance):
# File size: 342 Bytes
# a604738 7f74d90 a604738 fff417b
# (line-number gutter: 1 2 3 4 5 6)
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Single source of truth for the checkpoint name (was repeated twice).
MODEL_NAME = "EleutherAI/pythia-2.8b"

# Load EleutherAI's Pythia-2.8B causal LM and its matching tokenizer.
# NOTE: this downloads ~2.8B parameters on first run.
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

# Text-generation pipeline.
# do_sample=True is required for `temperature` to take effect: the pipeline
# defaults to greedy decoding, under which temperature is silently ignored
# (transformers emits a warning). The original code set temperature=0.4
# without enabling sampling, so it had no effect.
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=1024,      # cap on generated (not prompt) tokens
    do_sample=True,           # enable sampling so temperature applies
    repetition_penalty=1.2,   # >1.0 discourages repeating tokens
    temperature=0.4,          # low temperature: conservative sampling
)