RabbitRedux / inference.py
# inference.py
from transformers import pipeline
# Initialize the pipeline
model_name = "Mr-Vicky-01/Gemma-2B-Finetuined-pythonCode"
# Gemma-2B is a decoder-only (causal LM) model, so the "text-generation" task is used
# rather than "text2text-generation", which expects an encoder-decoder model.
pipe = pipeline("text-generation", model=model_name)


# Function to generate code based on user input
def generate_code(prompt):
    # Use the pipeline to generate text
    generated_code = pipe(prompt)
    return generated_code[0]['generated_text']  # Extract the generated text
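

# A minimal sketch (not part of the original script) of how generation parameters
# could be forwarded through the pipeline call. max_new_tokens, do_sample,
# temperature, and return_full_text are standard transformers text-generation
# arguments; the function name and the default values below are illustrative
# assumptions only.
def generate_code_with_params(prompt, max_new_tokens=256, temperature=0.7):
    # Generation kwargs are passed through the pipeline to model.generate();
    # return_full_text=False strips the prompt from the returned string.
    outputs = pipe(
        prompt,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=temperature,
        return_full_text=False,
    )
    return outputs[0]['generated_text']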


if __name__ == "__main__":
    # Example prompt for code generation
    user_prompt = "Write a Python function to calculate the Fibonacci sequence."
    result = generate_code(user_prompt)
    print("Generated Code:\n", result)