# Requires: pip install langchain langchain-community llama-cpp-python huggingface_hub gradio
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain
from langchain_community.llms import LlamaCpp
from huggingface_hub import hf_hub_download

# Download the quantized GGUF weights from the Hugging Face Hub
hf_hub_download(
    repo_id="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
    filename="Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf",
    local_dir="./models",
)

llm = LlamaCpp(
    model_path="models/Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf",
    n_ctx=2048,      # context window; the default (512) is too small for longer input texts -- adjust as needed
    max_tokens=512,  # leave enough room for the generated questions and answers
)

# Generator function
def gen_quiz(text_input, num):
    prompt = ChatPromptTemplate.from_template(
        "Generate {num} questions and their correct answers "
        "based on the following text:\n\n{text}\n\n"
    )
    chain = LLMChain(llm=llm, prompt=prompt)
    # Prepare the inputs for the chain and run it
    # (calling the chain directly is deprecated; use .invoke instead)
    quiz = chain.invoke({"text": text_input, "num": num})
    return quiz["text"]

# Example
text_example = (
    "In general, IFC, or “Industry Foundation Classes”, "
    "is a standardized, digital description of the built environment, "
    "including buildings and civil infrastructure. "
    "It is an open, international standard (ISO 16739-1:2018), "
    "meant to be vendor-neutral, or agnostic, and usable across a wide range "
    "of hardware devices, software platforms, and interfaces for many "
    "different use cases. The IFC schema specification is the primary "
    "technical deliverable of buildingSMART International to fulfill its "
    "goal to promote openBIM."
)

import gradio as gr

# Gradio interface
gr.close_all()
demo = gr.Interface(
    fn=gen_quiz,
    inputs=[
        gr.Textbox(label="Text to generate quiz from", lines=6),
        gr.Slider(minimum=1, maximum=10, value=3, step=1, label="Number of questions"),
    ],
    outputs=[gr.Textbox(label="Result", lines=10)],
    examples=[[text_example, 3]],
    title="Quiz Generator with LlamaCpp",
    description="Generate a quiz from a given text using Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf.",
)
demo.launch()