AliInamdar committed
Commit de1c670 · verified · 1 Parent(s): 1e74593

Update app.py

Files changed (1)
  1. app.py +76 -49
app.py CHANGED
@@ -1,64 +1,91 @@
  import gradio as gr
- from huggingface_hub import InferenceClient

  """
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
- """
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


- def respond(
-     message,
-     history: list[tuple[str, str]],
-     system_message,
-     max_tokens,
-     temperature,
-     top_p,
- ):
-     messages = [{"role": "system", "content": system_message}]

-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})

-     messages.append({"role": "user", "content": message})

-     response = ""

-     for message in client.chat_completion(
-         messages,
-         max_tokens=max_tokens,
-         stream=True,
-         temperature=temperature,
-         top_p=top_p,
-     ):
-         token = message.choices[0].delta.content

-         response += token
-         yield response


- """
- For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
- """
- demo = gr.ChatInterface(
-     respond,
-     additional_inputs=[
-         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-         gr.Slider(
-             minimum=0.1,
-             maximum=1.0,
-             value=0.95,
-             step=0.05,
-             label="Top-p (nucleus sampling)",
-         ),
-     ],
- )


  if __name__ == "__main__":
-     demo.launch()

  import gradio as gr
+ import pandas as pd
+ import duckdb
+ import requests
+ import re
+ import os
+ from io import BytesIO

+ # 🔐 Together API Key (set this securely in Hugging Face Spaces)
+ TOGETHER_API_KEY = os.getenv("TOGETHER_API_KEY") # Must be set in the environment variables
+
+ # 🔧 Global DataFrame holder
+ df = None
+
+ # 🧠 Generate SQL using Together API
+ def generate_sql_from_prompt(prompt, df):
+     schema = ", ".join([f"{col} ({str(dtype)})" for col, dtype in df.dtypes.items()])
+     full_prompt = f"""
+ You are a SQL expert. Here is a table called 'df' with the following schema:
+ {schema}
+
+ User question: "{prompt}"
+
+ Write a valid SQL query using the 'df' table. Return only the SQL code.
  """

+     url = "https://api.together.xyz/v1/chat/completions"
+     headers = {
+         "Authorization": f"Bearer {TOGETHER_API_KEY}",
+         "Content-Type": "application/json"
+     }
+     payload = {
+         "model": "mistralai/Mixtral-8x7B-Instruct-v0.1",
+         "messages": [{"role": "user", "content": full_prompt}],
+         "temperature": 0.2,
+         "max_tokens": 200
+     }

+     response = requests.post(url, headers=headers, json=payload)
+     response.raise_for_status()
+     result = response.json()
+     return result['choices'][0]['message']['content'].strip("```sql").strip("```").strip()

+ # 🧽 Clean SQL for DuckDB
+ def clean_sql_for_duckdb(sql, df_columns):
+     sql = sql.replace("`", '"')
+     for col in df_columns:
+         if " " in col and f'"{col}"' not in sql:
+             pattern = r'\b' + re.escape(col) + r'\b'
+             sql = re.sub(pattern, f'"{col}"', sql)
+     return sql

+ # 📦 Handle Excel upload
+ def upload_excel(file):
+     global df
+     df = pd.read_excel(BytesIO(file.read()))
+     return f"✅ Loaded file with shape {df.shape}"

+ # 💬 Handle chat prompt
+ def handle_chat(prompt):
+     global df
+     if df is None:
+         return "❌ Please upload an Excel file first.", pd.DataFrame()

+     try:
+         sql = generate_sql_from_prompt(prompt, df)
+         cleaned_sql = clean_sql_for_duckdb(sql, df.columns)
+         result_df = duckdb.query(cleaned_sql).to_df()
+         return f"📜 Generated SQL:\n{sql}", result_df
+     except Exception as e:
+         return f"❌ Error: {e}", pd.DataFrame()

+ # 🎨 Gradio UI
+ with gr.Blocks() as demo:
+     gr.Markdown("# 🤖 SQL Chatbot with Together API + DuckDB")
+     file_uploader = gr.File(label="📂 Upload Excel File", file_types=[".xlsx"])
+     upload_status = gr.Textbox(label="📄 File Status", interactive=False)

+     with gr.Row():
+         prompt_box = gr.Textbox(label="💬 Ask your Question", placeholder="e.g., What is the total revenue?")
+         generate_btn = gr.Button("🚀 Generate SQL")

+     sql_output = gr.Textbox(label="📜 SQL Query")
+     result_table = gr.Dataframe(label="📊 Query Result")

+     file_uploader.change(upload_excel, inputs=file_uploader, outputs=upload_status)
+     generate_btn.click(fn=handle_chat, inputs=prompt_box, outputs=[sql_output, result_table])

+ # 🏁 Launch app
  if __name__ == "__main__":
+     demo.launch()
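
For reference, the rewritten app relies on DuckDB's ability to query an in-memory pandas DataFrame by its Python variable name (a replacement scan), which is why the generated SQL targets a table called 'df'. The sketch below is a minimal standalone illustration of that pattern, outside of Gradio and the Together API, using a made-up sales table in place of an uploaded Excel file; it also shows why clean_sql_for_duckdb double-quotes column names that contain spaces.

    import duckdb
    import pandas as pd

    # Hypothetical data standing in for an uploaded Excel sheet.
    df = pd.DataFrame({
        "region": ["EU", "US", "US"],
        "total revenue": [120.0, 90.5, 60.0],  # a column name with a space
    })

    # DuckDB resolves the bare identifier df to the DataFrame above via a
    # replacement scan; a column name containing a space must be wrapped in
    # double quotes, which is the rewrite clean_sql_for_duckdb applies to the
    # model-generated SQL.
    sql = 'SELECT region, SUM("total revenue") AS revenue FROM df GROUP BY region'
    print(duckdb.query(sql).to_df())

Running this prints a two-row result grouped by region; the same duckdb.query(...).to_df() call is what handle_chat uses to turn the generated SQL into the DataFrame shown in the Gradio result table.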