cstr committed · Commit 895ad65 · verified · 1 Parent(s): a077d87

Update app.py

Files changed (1): app.py +41 -57
app.py CHANGED
@@ -201,54 +201,46 @@ def build_prompts(snippets: List[str], prompt_instruction: str, custom_prompt: O
 
     return "\n\n".join(prompts)
 
-def send_to_model(*args, **kwargs):
-    try:
-        with gr.Progress() as progress:
-            progress(0, "Preparing to send to model...")
-            result = send_to_model_impl(*args, **kwargs)
-            progress(1, "Complete!")
-            return result
-    except Exception as e:
-        return f"Error: {str(e)}", None
+def send_to_model(*args, **kwargs):  # Correct the outputs here
+    try:
+        with gr.Progress() as progress:
+            progress(0, "Preparing to send to model...")
+            summary, download_file = send_to_model_impl(*args, **kwargs)  # Get both outputs
+            progress(1, "Complete!")
+            return summary, download_file  # Return both outputs
+    except Exception as e:
+        return f"Error: {str(e)}", None  # Return error message and None for the file
 
 def send_to_model_impl(prompt, model_selection, hf_model_choice, hf_custom_model, hf_api_key,
-                       groq_model_choice, groq_api_key, openai_api_key):
-    try:
-        if model_selection == "Clipboard only":
-            return "Use copy/paste for processing", []
-
-        if model_selection == "HuggingFace Inference":
-            if not hf_api_key:
-                return "Error: HuggingFace API key required", []
-            if not hf_model_choice:
-                return "Error: Select a HuggingFace model", []
-            model_id = hf_custom_model if hf_model_choice == "Custom Model" else model_registry.hf_models[hf_model_choice]
-            summary = send_to_hf_inference(prompt, model_id, hf_api_key)
-
-        elif model_selection == "Groq API":
-            if not groq_api_key:
-                return "Error: Groq API key required", []
-            if not groq_model_choice:
-                return "Error: Select a Groq model", []
-            summary = send_to_groq(prompt, groq_model_choice, groq_api_key)
-
-        elif model_selection == "OpenAI ChatGPT":
-            if not openai_api_key:
-                return "Error: OpenAI API key required", []
-            summary = send_to_openai(prompt, openai_api_key)
-
-        else:
-            return "Error: Invalid model selection", []
-
-        # Save summary for download
-        with tempfile.NamedTemporaryFile(delete=False, mode='w', suffix='.txt') as f:
-            f.write(summary)
-
-        return summary, download_file  # Return the file for download_summary
-    except Exception as e:
-        error_msg = f"Error processing request: {str(e)}"
-        logging.error(error_msg)
-        return error_msg, []
+                       groq_model_choice, groq_api_key, openai_api_key, openai_model_choice):  # Added openai_model_choice
+    try:
+        if model_selection == "Clipboard only":
+            return "Use copy/paste for processing", None
+
+        if model_selection == "HuggingFace Inference":
+            # ... (Existing logic)
+
+        elif model_selection == "Groq API":
+            # ... (Existing logic)
+
+        elif model_selection == "OpenAI ChatGPT":
+            if not openai_api_key:
+                return "Error: OpenAI API key required", None
+            summary = send_to_openai(prompt, openai_api_key, model=openai_model_choice)  # Use openai_model_choice
+
+        else:
+            return "Error: Invalid model selection", None
+
+        with tempfile.NamedTemporaryFile(delete=False, mode='w', suffix='.txt') as f:
+            f.write(summary)
+            download_file = f.name
+
+        return summary, download_file
+
+    except Exception as e:
+        error_msg = f"Error processing request: {str(e)}"
+        logging.error(error_msg)
+        return error_msg, None  # Return the error message
 
 def send_to_hf_inference(prompt: str, model_name: str, api_key: str) -> str:
     try:
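Note on the two `# ... (Existing logic)` placeholders in the new `send_to_model_impl`: they stand in for the HuggingFace and Groq branches shown in the removed lines above. A minimal sketch of the complete new function with those branches carried over, assuming the helpers (`model_registry`, `send_to_hf_inference`, `send_to_groq`, `send_to_openai`) and the `tempfile`/`logging` imports defined elsewhere in app.py, and with the error returns switched from `[]` to `None` to match the new convention:

def send_to_model_impl(prompt, model_selection, hf_model_choice, hf_custom_model, hf_api_key,
                       groq_model_choice, groq_api_key, openai_api_key, openai_model_choice):
    try:
        if model_selection == "Clipboard only":
            return "Use copy/paste for processing", None

        if model_selection == "HuggingFace Inference":
            # Branch carried over from the removed version; error returns now use None
            if not hf_api_key:
                return "Error: HuggingFace API key required", None
            if not hf_model_choice:
                return "Error: Select a HuggingFace model", None
            model_id = hf_custom_model if hf_model_choice == "Custom Model" else model_registry.hf_models[hf_model_choice]
            summary = send_to_hf_inference(prompt, model_id, hf_api_key)

        elif model_selection == "Groq API":
            # Branch carried over from the removed version; error returns now use None
            if not groq_api_key:
                return "Error: Groq API key required", None
            if not groq_model_choice:
                return "Error: Select a Groq model", None
            summary = send_to_groq(prompt, groq_model_choice, groq_api_key)

        elif model_selection == "OpenAI ChatGPT":
            if not openai_api_key:
                return "Error: OpenAI API key required", None
            summary = send_to_openai(prompt, openai_api_key, model=openai_model_choice)

        else:
            return "Error: Invalid model selection", None

        # Persist the summary so the download component has a file path to serve
        with tempfile.NamedTemporaryFile(delete=False, mode='w', suffix='.txt') as f:
            f.write(summary)
            download_file = f.name

        return summary, download_file

    except Exception as e:
        error_msg = f"Error processing request: {str(e)}"
        logging.error(error_msg)
        return error_msg, None

The behavioral fixes over the old version are that `download_file` is now actually assigned (`f.name`) before being returned, and that `send_to_openai` receives the selected model.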
@@ -394,8 +386,6 @@ with gr.Blocks(css="""
     .gradio-container {max-width: 90%; margin: 0 auto;}
     @media (max-width: 768px) {.gradio-container {max-width: 98%; padding: 10px;} .gr-row {flex-direction: column;} .gr-col {width: 100%; margin-bottom: 10px;}}
     """) as demo:
-    gr.Markdown("# 📄 Smart PDF Summarizer")
-
     # State variables
     pdf_content = gr.State("")
     snippets = gr.State([])
@@ -815,16 +805,10 @@ with gr.Blocks(css="""
     send_to_model_btn.click(
         send_to_model,
         inputs=[
-            generated_prompt,
-            model_choice,
-            hf_model,
-            hf_custom_model,
-            hf_api_key,
-            groq_model,
-            groq_api_key,
-            openai_api_key
+            generated_prompt, model_choice, hf_model, hf_custom_model, hf_api_key,
+            groq_model, groq_api_key, openai_api_key, openai_model  # Add openai_model as input
         ],
-        outputs=[summary_output, download_summary]
+        outputs=[summary_output, download_summary]  # Correct outputs
     )
 
     groq_refresh_btn.click(
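Since `send_to_model` forwards `*args` unchanged into `send_to_model_impl`, the order of this `inputs` list has to match the implementation's positional parameters, which it does (nine components against nine parameters, with `openai_model` feeding the new `openai_model_choice`). Purely as an illustration of the mapping Gradio performs on click, with hypothetical `*_value` placeholders standing in for the current component values:

# Roughly what happens when send_to_model_btn is clicked: Gradio reads each
# input component's current value and passes them positionally to the handler.
summary_text, file_path = send_to_model(
    generated_prompt_value, model_choice_value, hf_model_value,
    hf_custom_model_value, hf_api_key_value, groq_model_value,
    groq_api_key_value, openai_api_key_value, openai_model_value,
)
# The two returned values populate summary_output and download_summary, in order.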
 