"""AIBUDDY Chatbot — a minimal Streamlit front-end for google/flan-t5-large.

Run the app with:  streamlit run your_script_name.py
"""
import streamlit as st
from transformers import T5ForConditionalGeneration, T5Tokenizer

# Hugging Face model identifier used for both the model and its tokenizer.
MODEL_NAME = "google/flan-t5-large"


@st.cache_resource
def load_model_and_tokenizer():
    """Load and cache the model/tokenizer once per server process.

    Streamlit re-executes this entire script on every user interaction
    (button click, text edit). Without `st.cache_resource`, the multi-GB
    FLAN-T5 weights would be reloaded from disk on each rerun, making the
    app unusably slow and memory-hungry.

    Returns:
        tuple: (T5ForConditionalGeneration, T5Tokenizer)
    """
    model = T5ForConditionalGeneration.from_pretrained(MODEL_NAME)
    tokenizer = T5Tokenizer.from_pretrained(MODEL_NAME)
    return model, tokenizer


model, tokenizer = load_model_and_tokenizer()

# Streamlit app title
st.title("AIBUDDY Chatbot")

# User input — default text demonstrates the instruction-style prompt FLAN-T5 expects.
input_text = st.text_area(
    "Enter your query:",
    "Translate English to French: 'Hello, how are you?'",
)

if st.button("Generate"):
    if not input_text.strip():
        # Guard: generating on an empty prompt wastes a model call and
        # produces meaningless output.
        st.warning("Please enter a query first.")
    else:
        # Tokenize input into model-ready tensor IDs.
        input_ids = tokenizer(input_text, return_tensors="pt").input_ids

        # Generate output (spinner gives feedback during the slow forward pass).
        with st.spinner("Generating response..."):
            output = model.generate(
                input_ids,
                max_length=100,
                num_return_sequences=1,
            )

        # Decode the single returned sequence, stripping pad/eos tokens.
        response = tokenizer.decode(output[0], skip_special_tokens=True)

        # Display the response
        st.subheader("Response:")
        st.write(response)