# Hugging Face Spaces demo app: code generation conditioned on files stored in a SQLite database.
import sqlite3

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Checkpoint to load. "your-model-name" is a placeholder — replace with a
# real Hub model id (e.g. a causal-LM checkpoint) before deploying.
model_name = "your-model-name"

# Loaded once at import time so every request reuses the same weights.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
def read_files_from_db():
    """Return every row of the ``files`` table in ``your_database.db``.

    Returns:
        list[tuple]: all rows, in the order SQLite yields them.
    """
    conn = sqlite3.connect('your_database.db')
    try:
        # Connection.execute() is a shortcut that creates the cursor for us.
        return conn.execute("SELECT * FROM files").fetchall()
    finally:
        # Original leaked the connection if the query raised; always close.
        conn.close()
def generate_code(input_text):
    """Generate code with the language model, conditioning on the list of
    files read from the database.

    Args:
        input_text: User-supplied instructions or partial code.

    Returns:
        str: the decoded model output. Note this decodes the full output
        sequence, so the prompt text is included in the return value.
    """
    files = read_files_from_db()
    # Column index 1 is presumably the file name — TODO confirm against
    # the `files` table schema.
    # join() instead of repeated `+=` avoids quadratic string building.
    context = "Available files:\n" + "".join(f"- {row[1]}\n" for row in files)
    prompt = f"{context}\nGenerate code based on the following input:\n{input_text}\n"
    inputs = tokenizer(prompt, return_tensors="pt")
    # NOTE(review): max_length caps prompt + generated tokens combined; if
    # long file lists are expected, `max_new_tokens` may be the better knob.
    outputs = model.generate(**inputs, max_length=500)
    generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_code
# Gradio UI: a single text box in, generated code text out.
iface = gr.Interface(
    fn=generate_code,
    inputs="text",
    outputs="text",
    title="Code Generation AI",
    description="Enter your code or instructions, and the AI will generate code based on available files.",
)

# Launched unconditionally (no __main__ guard) because Spaces executes this
# script directly; importing this module will start the server.
iface.launch()