RichieBurundi committed
Commit 58861f6 · verified · 1 Parent(s): d5b97bf

Update app.py

Files changed (1)
  1. app.py +17 -29
app.py CHANGED
@@ -1,40 +1,28 @@
  import gradio as gr
- from transformers import AutoTokenizer, AutoModelForCausalLM
- import sqlite3

- model_name = "Richieburundi/Ariginalmodel"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
- model = AutoModelForCausalLM.from_pretrained(model_name)

- def read_files_from_db():
-     conn = sqlite3.connect('Richieburundi/Amxxprogramer')
-     cursor = conn.cursor()
-     cursor.execute("SELECT * FROM files")
-     files = cursor.fetchall()
-     conn.close()
-     return files

- def generate_code(input_text):
-     files = read_files_from_db()
-
-     context = "Available files:\n"
-     for file in files:
-         context += f"- {file[1]}\n"
-
-     prompt = f"{context}\nGenerate code based on the following input:\n{input_text}\n"
-
-     inputs = tokenizer(prompt, return_tensors="pt")
-     outputs = model.generate(**inputs, max_length=500)
-     generated_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
-
-     return generated_code

  iface = gr.Interface(
-     fn=generate_code,
      inputs="text",
      outputs="text",
-     title="Code Generation AI",
-     description="Enter your code or instructions, and the AI will generate code based on available files."
  )

  iface.launch()
 
  import gradio as gr
+ from bertopic import BERTopic

+ model_name = "RichieBurundi/Ariginalmodel"

+ try:
+     model = BERTopic.load(model_name)
+ except Exception as e:
+     print(f"Error loading model: {e}")
+     # Fallback logic could be added here, or a different model could be used

+ def generate_text(input_text):
+     try:
+         topics, probs = model.transform([input_text])
+         generated_text = model.generate_topic_labels(topics[0], probs[0], top_n=1)[0]
+         return generated_text
+     except Exception as e:
+         return f"Error generating text: {e}"

  iface = gr.Interface(
+     fn=generate_text,
      inputs="text",
      outputs="text",
+     title="Ariginal Model Text Generation",
+     description="Enter your text, and the model will generate a response."
  )

  iface.launch()
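
A note on the updated generate_text: BERTopic's documented generate_topic_labels takes keyword arguments such as nr_words and separator and returns labels for all topics, rather than a topic id, probabilities, and top_n as passed above, so that call is likely to fail at runtime. A minimal sketch of the same idea built only on transform and get_topic (both part of BERTopic's public API) follows; the model id is carried over from the commit, and it is an assumption that the repo was saved in a format BERTopic.load accepts.

from bertopic import BERTopic

# Model id taken from the commit; assumed (not verified) to be loadable as a BERTopic model.
model_name = "RichieBurundi/Ariginalmodel"
model = BERTopic.load(model_name)

def generate_text(input_text):
    # Assign the input document to its nearest topic.
    topics, _probs = model.transform([input_text])
    topic_id = topics[0]

    # get_topic() returns a list of (word, score) pairs, or False if the topic id is unknown.
    words = model.get_topic(topic_id)
    if not words:
        return "No matching topic found."

    # Use the top keywords of the matched topic as a short label.
    return ", ".join(word for word, _score in words[:3])

If RichieBurundi/Ariginalmodel is in fact a causal language model (the removed code loaded it with AutoModelForCausalLM), the earlier transformers-based generation path, not BERTopic, would be the appropriate way to load it.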