from flask import Flask, request, render_template, jsonify
import PIL.Image
import google.generativeai as genai
import os
from tempfile import NamedTemporaryFile
from gradio_client import Client, handle_file  # Gradio client for the Qwen2 demo Space

app = Flask(__name__)

# Gemini configuration
generation_config = {
    "temperature": 1,
    "max_output_tokens": 8192,
}

safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]

GOOGLE_API_KEY = os.environ.get("TOKEN")
genai.configure(api_key=GOOGLE_API_KEY)


# Query Gemini with the uploaded image
def query_gemini(image_path, prompt="Résous ce problème mathématiques. Je veux qu'en réponse tu me donnes un rendu complet en utilisant du Latex."):
    img = PIL.Image.open(image_path)
    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro-002",
        generation_config=generation_config,
        safety_settings=safety_settings
    )
    try:
        response = model.generate_content([prompt, img], request_options={"timeout": 600})
        return response.text
    except Exception as e:
        return str(e)


# Query the Qwen2 math demo Space
def query_qwen2(image_path, question="Résous ce problème mathématiques. Donne la réponse en utilisant LaTeX."):
    try:
        client = Client("Qwen/Qwen2-Math-Demo")
        result = client.predict(
            image=handle_file(image_path),
            sketchpad=None,
            question=question,
            api_name="/math_chat_bot"
        )
        return result
    except Exception as e:
        return str(e)


@app.route('/')
def index():
    return render_template('math.html')


@app.route('/upload', methods=['POST'])
def upload_image():
    if 'image' not in request.files:
        return jsonify({'error': 'Aucune image fournie'}), 400

    file = request.files['image']
    model_choice = request.form.get('model_choice', 'gemini')  # Which backend to use

    if file.filename == '':
        return jsonify({'error': 'Aucun fichier sélectionné'}), 400

    # Persist the upload to a temporary file so both backends can read it from disk
    with NamedTemporaryFile(delete=False) as temp_file:
        file.save(temp_file.name)

    try:
        if model_choice == "mariam's":
            result = query_gemini(temp_file.name)
        else:
            result = query_qwen2(temp_file.name)
        return jsonify({'result': result, 'model': model_choice})
    except Exception as e:
        return jsonify({'error': str(e)}), 500
    finally:
        # Always remove the temporary file, even if a backend call fails
        os.unlink(temp_file.name)
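
# A minimal local entry point, added here as a sketch: the source does not show how the
# app is launched (it may rely on `flask run` or a WSGI server instead), and the host,
# port, and debug values below are illustrative assumptions, not taken from the original.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860, debug=False)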