File size: 7,565 Bytes
886c92a
da7ef42
886c92a
4533086
 
886c92a
c253059
886c92a
b4c693d
 
c23cd74
da7ef42
886c92a
b4c693d
886c92a
b4c693d
ac5ae77
 
5ad49c1
4533086
886c92a
 
 
 
 
 
 
5ad49c1
c23cd74
886c92a
 
 
 
 
 
5ad49c1
886c92a
 
6d14961
5ad49c1
 
886c92a
5ad49c1
886c92a
 
 
 
 
 
 
 
 
 
7319f0c
 
886c92a
 
 
 
5ad49c1
886c92a
 
 
 
 
 
 
 
7319f0c
5ad49c1
 
886c92a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5ad49c1
886c92a
5ad49c1
 
 
886c92a
b293b19
5ad49c1
 
886c92a
b293b19
886c92a
 
 
 
 
 
 
 
 
 
 
 
 
b293b19
984f26b
5ad49c1
 
886c92a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5ad49c1
886c92a
 
7319f0c
 
886c92a
 
 
 
 
 
7319f0c
5ad49c1
 
886c92a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10ad7a5
886c92a
 
5ad49c1
886c92a
 
5ad49c1
886c92a
 
5ad49c1
 
886c92a
5ad49c1
 
ccfefd3
c253059
5ad49c1
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
from flask import Flask, request, render_template, jsonify, Response
import json
import os
from google import genai
from google.genai import types
import base64
from werkzeug.utils import secure_filename
import mimetypes
from dotenv import load_dotenv


app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  # 16MB max file size
load_dotenv()  # loads GOOGLE_API_KEY (and any other vars) from a local .env
# Gemini client configuration
API_KEY = os.getenv("GOOGLE_API_KEY")
# French persona prompt, sent as the model's system instruction. Translation:
# "You are an intelligent and friendly assistant named Mariam. You assist
# users to the best of your abilities. You were created by Aenir."
SYSTEM_INSTRUCTION = "Tu es un assistant intelligent et amical nommé Mariam. Tu assistes les utilisateurs au mieux de tes capacités. Tu as été créé par Aenir."

client = genai.Client(api_key=API_KEY)

# Default generation settings
MODEL = "gemini-2.5-flash"
DEFAULT_CONFIG = {
    "temperature": 0.7,
    "max_output_tokens": 8192,
    "top_p": 0.9,
    "top_k": 40
}

# Tools enabled by default on every chat session
DEFAULT_TOOLS = [
    types.Tool(code_execution=types.ToolCodeExecution()),
    types.Tool(google_search=types.GoogleSearch()),
    types.Tool(url_context=types.UrlContext())
]

# Conversation store keyed by conversation_id (in production, use a database)
conversations = {}

@app.route('/')
def index():
    """Render the single-page chat front-end."""
    return render_template('index.html')

@app.route('/chat', methods=['POST'])
def chat():
    """Stream a Gemini chat reply as SSE-style ``data:`` events.

    Expects a JSON body with:
        message (str): the user's message (default '').
        thinking_enabled (bool): stream model "thought" parts too (default True).
        conversation_id (str): key into the in-memory session store (default 'default').

    Returns:
        A streaming Response of ``data: {"type": ..., "content": ...}\\n\\n``
        JSON events ('thought' / 'text'), terminated by a ``{"type": "end"}``
        event, or a ``{"type": "error"}`` event if streaming fails.
        On a failure before streaming starts: JSON ``{'error': ...}`` with 500.
    """
    try:
        # get_json() can return None for a JSON body of "null"; guard it.
        data = request.get_json() or {}
        message = data.get('message', '')
        thinking_enabled = data.get('thinking_enabled', True)
        conversation_id = data.get('conversation_id', 'default')

        # Build the generation config: system prompt, optional dynamic
        # thinking, and the default tool set.
        config_dict = DEFAULT_CONFIG.copy()
        config_dict["system_instruction"] = SYSTEM_INSTRUCTION
        if thinking_enabled:
            config_dict["thinking_config"] = types.ThinkingConfig(
                thinking_budget=-1,  # -1 = dynamic thinking budget
                include_thoughts=True
            )

        config_dict["tools"] = DEFAULT_TOOLS
        generation_config = types.GenerateContentConfig(**config_dict)

        # Lazily create one chat session per conversation id.
        if conversation_id not in conversations:
            conversations[conversation_id] = client.chats.create(
                model=MODEL,
                config=generation_config
            )

        # Named chat_session (not `chat`) to avoid shadowing this view function.
        chat_session = conversations[conversation_id]

        def generate():
            """Yield SSE-formatted events from the model's streamed reply."""
            try:
                response_stream = chat_session.send_message_stream(
                    message,
                    config=generation_config
                )

                for chunk in response_stream:
                    # Some streamed chunks (e.g. usage-metadata-only chunks)
                    # carry no candidates or no content parts; iterating them
                    # blindly raises TypeError, so skip them instead.
                    if not chunk.candidates:
                        continue
                    content = chunk.candidates[0].content
                    if content is None or not content.parts:
                        continue
                    for part in content.parts:
                        if part.text:
                            if part.thought and thinking_enabled:
                                yield f"data: {json.dumps({'type': 'thought', 'content': part.text})}\n\n"
                            else:
                                yield f"data: {json.dumps({'type': 'text', 'content': part.text})}\n\n"

                # End-of-stream signal for the client.
                yield f"data: {json.dumps({'type': 'end'})}\n\n"

            except Exception as e:
                yield f"data: {json.dumps({'type': 'error', 'content': str(e)})}\n\n"

        return Response(generate(), mimetype='text/plain')

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@app.route('/upload', methods=['POST'])
def upload_file():
    """Accept a multipart file upload and return it base64-encoded.

    Expects a multipart form with a ``file`` field. Returns JSON
    ``{'success', 'filename', 'mime_type', 'data'}`` where ``data`` is the
    base64-encoded file content, or ``{'error': ...}`` with 400/500.
    """
    try:
        if 'file' not in request.files:
            return jsonify({'error': 'No file uploaded'}), 400

        file = request.files['file']
        if file.filename == '':
            return jsonify({'error': 'No file selected'}), 400

        # Read the whole upload into memory (bounded by MAX_CONTENT_LENGTH).
        file_bytes = file.read()
        # Prefer the browser-reported type, then guess from the extension,
        # then fall back to a generic binary type so mime_type is never None
        # (a None here would later break types.Part.from_bytes).
        mime_type = (file.content_type
                     or mimetypes.guess_type(file.filename)[0]
                     or 'application/octet-stream')

        # Base64 so the payload survives the JSON round-trip to the client.
        file_b64 = base64.b64encode(file_bytes).decode()

        return jsonify({
            'success': True,
            'filename': file.filename,
            'mime_type': mime_type,
            'data': file_b64
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@app.route('/chat_with_file', methods=['POST'])
def chat_with_file():
    """Stream a Gemini reply for a message with an optional attached file.

    Expects a JSON body with:
        message (str): the user's message (default '').
        file_data (dict | None): ``{'data': <base64 str>, 'mime_type': str}``
            as produced by the /upload endpoint.
        thinking_enabled (bool): stream model "thought" parts too (default True).
        conversation_id (str): key into the in-memory session store (default 'default').

    Returns:
        The same SSE-style streaming Response format as /chat.
    """
    try:
        # get_json() can return None for a JSON body of "null"; guard it.
        data = request.get_json() or {}
        message = data.get('message', '')
        file_data = data.get('file_data')
        thinking_enabled = data.get('thinking_enabled', True)
        conversation_id = data.get('conversation_id', 'default')

        # Build the generation config (mirrors /chat).
        config_dict = DEFAULT_CONFIG.copy()
        config_dict["system_instruction"] = SYSTEM_INSTRUCTION
        if thinking_enabled:
            config_dict["thinking_config"] = types.ThinkingConfig(
                thinking_budget=-1,  # -1 = dynamic thinking budget
                include_thoughts=True
            )

        config_dict["tools"] = DEFAULT_TOOLS
        generation_config = types.GenerateContentConfig(**config_dict)

        # Lazily create one chat session per conversation id.
        if conversation_id not in conversations:
            conversations[conversation_id] = client.chats.create(
                model=MODEL,
                config=generation_config
            )

        # Named chat_session (not `chat`) to avoid shadowing the /chat view.
        chat_session = conversations[conversation_id]

        # Message text first, then the decoded file part (if provided).
        contents = [message]

        if file_data:
            file_bytes = base64.b64decode(file_data['data'])
            file_part = types.Part.from_bytes(
                data=file_bytes,
                mime_type=file_data['mime_type']
            )
            contents.append(file_part)

        def generate():
            """Yield SSE-formatted events from the model's streamed reply."""
            try:
                response_stream = chat_session.send_message_stream(
                    contents,
                    config=generation_config
                )

                for chunk in response_stream:
                    # Skip chunks with no candidates or no content parts
                    # (e.g. usage-metadata-only chunks) instead of raising.
                    if not chunk.candidates:
                        continue
                    content = chunk.candidates[0].content
                    if content is None or not content.parts:
                        continue
                    for part in content.parts:
                        if part.text:
                            if part.thought and thinking_enabled:
                                yield f"data: {json.dumps({'type': 'thought', 'content': part.text})}\n\n"
                            else:
                                yield f"data: {json.dumps({'type': 'text', 'content': part.text})}\n\n"

                yield f"data: {json.dumps({'type': 'end'})}\n\n"

            except Exception as e:
                yield f"data: {json.dumps({'type': 'error', 'content': str(e)})}\n\n"

        return Response(generate(), mimetype='text/plain')

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@app.route('/reset_conversation', methods=['POST'])
def reset_conversation():
    """Discard the stored chat session for a conversation id.

    Expects a JSON body with ``conversation_id`` (default 'default').
    Succeeds whether or not the id exists. Returns ``{'success': True}``,
    or ``{'error': ...}`` with 500 on failure.
    """
    try:
        payload = request.get_json()
        conv_id = payload.get('conversation_id', 'default')

        # pop() with a default covers both the present and absent cases
        # in a single call (equivalent to the check-then-del pattern).
        conversations.pop(conv_id, None)

        return jsonify({'success': True})

    except Exception as e:
        return jsonify({'error': str(e)}), 500

if __name__ == '__main__':
    # NOTE(security): debug=True enables the Werkzeug interactive debugger,
    # and host='0.0.0.0' binds on all interfaces — together these allow
    # remote code execution if reachable. Disable debug outside local dev.
    app.run(debug=True, host='0.0.0.0', port=5000)