from flask import Flask, request, render_template, jsonify, Response
import json
import os
from google import genai
from google.genai import types
import base64
from werkzeug.utils import secure_filename
import mimetypes

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  # 16 MB upload limit

# Read the Gemini API key from the environment rather than hardcoding it.
API_KEY = os.environ.get("GEMINI_API_KEY")
client = genai.Client(api_key=API_KEY)

MODEL = "gemini-2.5-flash"
DEFAULT_CONFIG = {
    "temperature": 0.7,
    "max_output_tokens": 8192,
    "top_p": 0.9,
    "top_k": 40
}
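
# Built-in tools enabled for every chat: Python code execution, Google Search
# grounding, and URL context retrieval.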
DEFAULT_TOOLS = [
    types.Tool(code_execution=types.ToolCodeExecution()),
    types.Tool(google_search=types.GoogleSearch()),
    types.Tool(url_context=types.UrlContext())
]
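
# In-memory store of chat sessions, keyed by conversation_id.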
conversations = {}


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/chat', methods=['POST'])
def chat():
    try:
        data = request.get_json()
        message = data.get('message', '')
        thinking_enabled = data.get('thinking_enabled', True)
        conversation_id = data.get('conversation_id', 'default')
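
        # Build the generation config. When thinking is enabled, a budget of -1
        # lets the model decide how much to think, and include_thoughts=True
        # streams thought summaries back alongside the answer.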
        config_dict = DEFAULT_CONFIG.copy()
        if thinking_enabled:
            config_dict["thinking_config"] = types.ThinkingConfig(
                thinking_budget=-1,
                include_thoughts=True
            )

        config_dict["tools"] = DEFAULT_TOOLS
        generation_config = types.GenerateContentConfig(**config_dict)
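
        # Reuse the chat session for this conversation, or create a new one.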
        if conversation_id not in conversations:
            conversations[conversation_id] = client.chats.create(
                model=MODEL,
                config=generation_config
            )

        chat = conversations[conversation_id]
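
        # Stream the reply back as Server-Sent Events, tagging thought parts
        # separately from answer text.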
        def generate():
            try:
                response_stream = chat.send_message_stream(
                    message,
                    config=generation_config
                )

                full_response = ""
                thoughts = ""

                for chunk in response_stream:
                    # Skip chunks that carry no content parts (e.g. metadata-only chunks).
                    if (not chunk.candidates
                            or not chunk.candidates[0].content
                            or not chunk.candidates[0].content.parts):
                        continue
                    for part in chunk.candidates[0].content.parts:
                        if part.text:
                            if part.thought and thinking_enabled:
                                thoughts += part.text
                                yield f"data: {json.dumps({'type': 'thought', 'content': part.text})}\n\n"
                            else:
                                full_response += part.text
                                yield f"data: {json.dumps({'type': 'text', 'content': part.text})}\n\n"

                yield f"data: {json.dumps({'type': 'end'})}\n\n"

            except Exception as e:
                yield f"data: {json.dumps({'type': 'error', 'content': str(e)})}\n\n"

        return Response(generate(), mimetype='text/event-stream')

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/upload', methods=['POST'])
def upload_file():
    try:
        if 'file' not in request.files:
            return jsonify({'error': 'No file uploaded'}), 400

        file = request.files['file']
        if file.filename == '':
            return jsonify({'error': 'No file selected'}), 400

        file_bytes = file.read()
        # Fall back to a generic type if neither the browser nor the filename
        # yields a usable MIME type.
        mime_type = (file.content_type
                     or mimetypes.guess_type(file.filename)[0]
                     or 'application/octet-stream')
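
        # Return the file base64-encoded so the client can attach it to a
        # later /chat_with_file request.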
        file_b64 = base64.b64encode(file_bytes).decode()

        return jsonify({
            'success': True,
            'filename': file.filename,
            'mime_type': mime_type,
            'data': file_b64
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/chat_with_file', methods=['POST'])
def chat_with_file():
    try:
        data = request.get_json()
        message = data.get('message', '')
        file_data = data.get('file_data')
        thinking_enabled = data.get('thinking_enabled', True)
        conversation_id = data.get('conversation_id', 'default')

        config_dict = DEFAULT_CONFIG.copy()
        if thinking_enabled:
            config_dict["thinking_config"] = types.ThinkingConfig(
                thinking_budget=-1,
                include_thoughts=True
            )

        config_dict["tools"] = DEFAULT_TOOLS
        generation_config = types.GenerateContentConfig(**config_dict)
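
        # Session handling mirrors /chat: reuse or create the conversation.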
        if conversation_id not in conversations:
            conversations[conversation_id] = client.chats.create(
                model=MODEL,
                config=generation_config
            )

        chat = conversations[conversation_id]
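
        # Build the request contents: the text message plus, if a file was
        # uploaded, its bytes decoded back into an inline Part.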
        contents = [message]

        if file_data:
            file_bytes = base64.b64decode(file_data['data'])
            file_part = types.Part.from_bytes(
                data=file_bytes,
                mime_type=file_data['mime_type']
            )
            contents.append(file_part)
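
        # Stream the reply back the same way as /chat.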
        def generate():
            try:
                response_stream = chat.send_message_stream(
                    contents,
                    config=generation_config
                )

                for chunk in response_stream:
                    # Skip chunks that carry no content parts (e.g. metadata-only chunks).
                    if (not chunk.candidates
                            or not chunk.candidates[0].content
                            or not chunk.candidates[0].content.parts):
                        continue
                    for part in chunk.candidates[0].content.parts:
                        if part.text:
                            if part.thought and thinking_enabled:
                                yield f"data: {json.dumps({'type': 'thought', 'content': part.text})}\n\n"
                            else:
                                yield f"data: {json.dumps({'type': 'text', 'content': part.text})}\n\n"

                yield f"data: {json.dumps({'type': 'end'})}\n\n"

            except Exception as e:
                yield f"data: {json.dumps({'type': 'error', 'content': str(e)})}\n\n"

        return Response(generate(), mimetype='text/event-stream')

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@app.route('/reset_conversation', methods=['POST'])
def reset_conversation():
    try:
        data = request.get_json()
        conversation_id = data.get('conversation_id', 'default')

        if conversation_id in conversations:
            del conversations[conversation_id]

        return jsonify({'success': True})

    except Exception as e:
        return jsonify({'error': str(e)}), 500
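

# Run the Flask development server (debug mode, all interfaces, port 5000).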
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=5000)