"""Gradio chat app for the c4ai-command-r7b 4-bit model with grounded documents.

For more information on `huggingface_hub` Inference API support, please check
the docs:
https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference

For information on how to customize the ChatInterface, peruse the gradio docs:
https://www.gradio.app/docs/chatinterface
"""

import gradio as gr
import spaces
from huggingface_hub import InferenceClient
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "Svngoku/c4ai-command-r7b-12-2024-4bit"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)

# NOTE(review): this client is never used below (all generation is local via
# `model.generate`). Kept so any external importer still finds the name;
# consider removing once confirmed unused elsewhere.
client = InferenceClient(MODEL_ID)


def wrap_text_output(text):
    """Wrap *text* in Command-R turn tokens.

    NOTE(review): currently unused -- prompting goes through
    ``tokenizer.apply_chat_template`` instead. This template also lacks the
    matching ``<|END_OF_TURN_TOKEN|>`` markers; verify against the model's
    chat template before reusing.
    """
    wrapped_text = f"<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>{text}<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"
    return wrapped_text


def _parse_documents(documents_text):
    """Parse newline-separated ``heading: body`` lines into document dicts.

    Malformed lines (no ``:`` separator) are reported and skipped instead of
    aborting the whole request.
    """
    documents = []
    for doc in documents_text.split("\n"):
        if not doc.strip():
            continue
        heading, sep, body = doc.partition(":")
        if not sep:
            print(f"Invalid document format: {doc}")
            continue
        documents.append({"heading": heading.strip(), "body": body.strip()})
    return documents


@spaces.GPU
def generate_response(
    message,
    history,
    documents_text="",
    system_message="You are a friendly Chatbot.",
    max_new_tokens=512,
    temperature=0.7,
    top_p=0.95,
):
    """Generate one chat reply, optionally grounded on user-supplied documents.

    Args:
        message: The latest user message.
        history: Prior turns as role/content dicts (``type="messages"``).
        documents_text: Newline-separated ``heading: body`` grounding docs.
        system_message: System prompt prepended to the conversation.
        max_new_tokens: Generation length cap (from the UI slider).
        temperature: Sampling temperature (from the UI slider).
        top_p: Nucleus-sampling threshold (from the UI slider).

    Returns:
        The decoded model reply (newly generated tokens only).
    """
    # BUGFIX: the original signature took only `documents_text`, but the
    # ChatInterface passed four additional inputs -> TypeError on every turn.
    conversation = (
        [{"role": "system", "content": system_message}]
        + list(history)
        + [{"role": "user", "content": message}]
    )
    documents = _parse_documents(documents_text)

    # With tokenize=False this returns a string; the original also passed
    # return_tensors="pt", which is ignored in that mode.
    input_prompt = tokenizer.apply_chat_template(
        conversation=conversation,
        documents=documents,
        tokenize=False,
        add_generation_prompt=True,
    )
    input_ids = tokenizer(input_prompt, return_tensors="pt").input_ids
    input_ids = input_ids.to(model.device)

    # BUGFIX: honor the UI sliders instead of hardcoding 2048 / 0.3.
    gen_tokens = model.generate(
        input_ids,
        max_new_tokens=int(max_new_tokens),
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
    )

    # BUGFIX: the original decoded with skip_special_tokens=True and then
    # split on "<|CHATBOT_TOKEN|>" -- but that marker was already stripped,
    # so the whole prompt leaked into the reply. Decode only the new tokens.
    new_tokens = gen_tokens[0][input_ids.shape[1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)


demo = gr.ChatInterface(
    generate_response,
    # History must arrive as role/content dicts to match generate_response;
    # the default tuple format would corrupt `conversation` above.
    type="messages",
    additional_inputs=[
        gr.Textbox(
            value="",
            lines=4,
            label="Documents (one 'heading: body' per line)",
        ),
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)

if __name__ == "__main__":
    demo.launch()