import sqlite3

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Replace with the Hugging Face model you want to use for code generation.
model_name = "your-model-name"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def read_files_from_db():
    """Return all rows from the `files` table of the local SQLite database."""
    conn = sqlite3.connect('your_database.db')
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM files")
    files = cursor.fetchall()
    conn.close()
    return files
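
# Note (assumption): read_files_from_db expects a `files` table whose second
# column (index 1) holds the file name. A minimal schema matching that layout
# could be created with a helper like the one below; `init_db` is a
# hypothetical name and the column set is only a sketch, so adapt it to your
# actual database.
def init_db(db_path="your_database.db"):
    conn = sqlite3.connect(db_path)
    conn.execute(
        "CREATE TABLE IF NOT EXISTS files ("
        "id INTEGER PRIMARY KEY, name TEXT, content TEXT)"
    )
    conn.commit()
    conn.close()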

def generate_code(input_text):
    files = read_files_from_db()

    # Build a context block listing the available file names; this assumes the
    # file name is stored in the second column (index 1) of each row.
    context = "Available files:\n"
    for file in files:
        context += f"- {file[1]}\n"

    prompt = f"{context}\nGenerate code based on the following input:\n{input_text}\n"

    inputs = tokenizer(prompt, return_tensors="pt")
    # Use max_new_tokens so the budget applies to the generated code only,
    # not to the prompt plus the generated code.
    outputs = model.generate(**inputs, max_new_tokens=500)
    # Decode only the newly generated tokens so the prompt is not echoed back.
    generated_code = tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True
    )

    return generated_code

iface = gr.Interface(
    fn=generate_code,
    inputs="text",
    outputs="text",
    title="Code Generation AI",
    description="Enter your instructions, and the AI will generate code using the files listed in the database as context.",
)

iface.launch()