import gradio as gr
import torch

# Load the model
model = torch.load("path/to/basilkr_luke_ST_low_model.pth")
model.eval()  # Set the model to evaluation mode

# Define a function to make predictions
def predict(text):
    # Perform inference using the model
    # You need to adapt this based on the specific requirements of your model
    result = model(text)
    return result

# Create the Gradio interface
iface = gr.Interface(fn=predict, inputs="text", outputs="text")
iface.launch()
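
# --- Hedged sketch, not part of the original snippet ---
# predict() above feeds a raw string straight into the model, which a typical
# PyTorch/LUKE model cannot consume. If the checkpoint actually holds a
# Transformers-style sequence-classification model, inference usually goes
# through the tokenizer the model was trained with. The function below is an
# assumption-laden sketch of what that adaptation might look like: the
# tokenizer path is a placeholder, and it presumes torch.load() returned a
# callable Hugging Face model object. To use it, define it before
# gr.Interface(...) and pass it as fn= instead of predict.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/tokenizer")  # placeholder: the tokenizer the model was trained with

def predict_with_tokenizer(text):
    # Tokenize the raw text into tensors (input_ids, attention_mask, ...)
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    # Disable gradient tracking for inference
    with torch.no_grad():
        outputs = model(**inputs)
    # Return the highest-scoring class index as a string for the Gradio text output
    return str(outputs.logits.argmax(dim=-1).item())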