dhanvanth183 committed on
Commit
df0d3fe
·
verified ·
1 Parent(s): d66a587

Upload 3 files

Browse files

This version uses LLMs three times:
1. To generate the questions from a small context.
2. To generate a prompt based on context and the question and answers.
3. To generate the personalized invites as per the designed user prompt.

Files changed (2) hide show
  1. app.py +454 -109
  2. groq_llms.py +225 -0
app.py CHANGED
@@ -1,109 +1,454 @@
1
- import streamlit as st
2
- import pandas as pd
3
- #from openai_llms import LLMHandler
4
- from Groq_llms import LLMHandler
5
- import tempfile
6
- import os
7
-
8
- # Load environment variables for OpenAI API Key
9
- from dotenv import load_dotenv
10
-
11
- load_dotenv()
12
-
13
- # Initialize LLMHandler
14
- llm_handler = LLMHandler()
15
-
16
-
17
- def process_csv(file, user_prompt):
18
- """Read CSV, generate responses using LLMHandler, and return processed DataFrame."""
19
- df = pd.read_csv(file)
20
- responses = []
21
-
22
- for _, row in df.iterrows():
23
- try:
24
- response = llm_handler.generate_response(user_prompt, row.to_dict())
25
- responses.append(response)
26
- except Exception as e:
27
- responses.append(f"Error: {e}")
28
-
29
- df["Generated Text"] = responses
30
- return df
31
-
32
-
33
- # Streamlit UI
34
- st.set_page_config(page_title="Invitation Generator", page_icon="💬", layout="wide")
35
-
36
- # Header
37
- st.title("Invite AI")
38
- st.markdown(
39
- """
40
- Welcome to the Invitation Generator! This tool helps you create personalized invitations using the power of AI.
41
- Follow the steps below to upload your data and generate professional invitation texts.
42
- """
43
- )
44
-
45
- # Section: Template Download and Instructions
46
- st.sidebar.title("Instructions")
47
- st.sidebar.markdown(
48
- """
49
- ### Template Download
50
- [Click here to download the suggested CSV template](http://surl.li/ptvzzv) 📥
51
-
52
- ### Required Fields
53
- - **Unique Identifier for each receiver**
54
- - **Name of the receiver**
55
- - **Designation/Job title of the receiver**
56
- - **Company/Organisation where the receiver works**
57
- - **Areas the receiver is interested in / has expertise in**
58
- - **Categorize receivers into groups** for consistent instructions.
59
- """
60
- )
61
-
62
- # Main Section
63
- st.markdown(
64
- """
65
- ### Steps to Use
66
- 1. **Download the template** from the sidebar.
67
- 2. **Fill out the details** as per the instructions.
68
- 3. **Upload the completed CSV file** below.
69
- 4. Enter a **prompt** to generate personalized invitations.
70
- """
71
- )
72
-
73
- # File uploader for CSV
74
- uploaded_file = st.file_uploader("📂 Upload CSV File", type=["csv"])
75
- user_prompt = st.text_area(
76
- "✍️ Enter the prompt for generating invitation texts:",
77
- "Write professional invitation text tailored for the group.",
78
- height=150,
79
- )
80
-
81
- # Processing and displaying results
82
- if uploaded_file is not None and user_prompt:
83
- st.write("⏳ Processing your file... Please wait.")
84
- processed_df = process_csv(uploaded_file, user_prompt)
85
-
86
- # Display results
87
- st.write("### Generated Invitations")
88
- st.dataframe(processed_df, use_container_width=True)
89
-
90
- # Option to download the processed CSV
91
- with tempfile.NamedTemporaryFile(delete=False, suffix=".csv") as temp_file:
92
- processed_df.to_csv(temp_file.name, index=False)
93
- temp_file.close() # Ensure the file is properly closed before proceeding
94
-
95
- st.download_button(
96
- label="📥 Download Results CSV",
97
- data=open(temp_file.name, "rb"),
98
- file_name="generated_invitations.csv",
99
- mime="text/csv",
100
- )
101
-
102
- # Safely delete the temporary file
103
- os.unlink(temp_file.name)
104
-
105
- # Footer
106
- st.markdown("---")
107
- st.markdown(
108
- "💡 **Tip:** Ensure your data aligns with the provided template for accurate results."
109
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import pandas as pd
3
+ from groq_llms import LLMHandler
4
+ import tempfile
5
+ import os
6
+ from dotenv import load_dotenv
7
+
8
+ load_dotenv()
9
+
10
+ # Initialize LLMHandler
11
+ llm_handler = LLMHandler()
12
+
13
+
14
def process_csv(file, user_prompt):
    """Read CSV, generate responses using LLMHandler, and return processed DataFrame."""
    df = pd.read_csv(file)

    def _invite_for(record):
        # Best-effort per row: a failure on one recipient must not abort the
        # batch, so the error message is surfaced inline in the output column.
        try:
            return llm_handler.generate_response(user_prompt, record)
        except Exception as exc:
            return f"Error: {exc}"

    df["Generated Text"] = [_invite_for(row.to_dict()) for _, row in df.iterrows()]
    return df
28
+
29
+
30
def initialize_session_state():
    """Seed every session-state key the wizard relies on with its default value.

    Runs on every rerun; existing values are never overwritten.
    """
    defaults = {
        'prompt_creation_method': None,
        'current_step': 'choose_method',
        'context': "",
        'questions': [],
        'answers': {},
        'multiselect_answers': {},
        'custom_options': {},
        'final_prompt': "",
        'direct_prompt': "",
    }
    for key, value in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = value
50
+
51
+
52
def display_progress_tracker():
    """Render an always-visible summary of the wizard's progress.

    Each recorded artifact (context, answers, direct prompt, final prompt) is
    shown with an edit button that jumps back to the step that produced it.
    """
    with st.expander("📋 View Progress", expanded=True):
        method = st.session_state.prompt_creation_method
        if method:
            st.write(f"**Method chosen:** {method.title()}")

        if st.session_state.context:
            st.write("**Initial Context:**")
            st.info(st.session_state.context)
            if st.button("Edit Context", key="edit_context"):
                st.session_state.current_step = 'initial_context'
                st.rerun()

        if st.session_state.answers:
            st.write("**Your Responses:**")
            for idx, q in enumerate(st.session_state.questions):
                # Multiselect answers take precedence over the flattened text form.
                if idx in st.session_state.multiselect_answers:
                    joined = ", ".join(st.session_state.multiselect_answers[idx])
                    st.success(f"Q: {q['question']}\nA: {joined}")
                elif idx in st.session_state.answers:
                    st.success(f"Q: {q['question']}\nA: {st.session_state.answers[idx]}")
            if st.button("Edit Responses", key="edit_responses"):
                st.session_state.current_step = 'answer_questions'
                st.rerun()

        if st.session_state.direct_prompt:
            st.write("**Your Direct Prompt:**")
            st.info(st.session_state.direct_prompt)
            if st.button("Edit Prompt", key="edit_direct_prompt"):
                st.session_state.current_step = 'direct_prompt'
                st.rerun()

        if st.session_state.final_prompt:
            st.write("**Final Generated Prompt:**")
            st.info(st.session_state.final_prompt)
            if st.button("Edit Final Prompt", key="edit_final_prompt"):
                st.session_state.current_step = 'edit_prompt'
                st.rerun()
90
+
91
+
92
# ---------------------------------------------------------------------------
# Streamlit page setup
# ---------------------------------------------------------------------------
st.set_page_config(page_title="Invite AI", page_icon="💬", layout="wide")

st.title("Invite AI")
st.markdown(
    """
    Welcome to the Invitation Generator! This tool helps you create personalized invitations using the power of AI.
    """
)

# Session state must exist before any widget below reads from it.
initialize_session_state()

# The tracker is rendered on every rerun so progress stays visible.
display_progress_tracker()

# Sidebar: template download link and suggested data requirements.
st.sidebar.title("Instructions")
_SIDEBAR_HELP = """
    ### Template Download
    [Click here to download the suggested CSV template](http://surl.li/ptvzzv) 📥
    ### Suggested Requirements
    - **Unique Identifier for each receiver**
    - **Name of the receiver**
    - **Designation/Job title of the receiver**
    - **Company/Organisation where the receiver works**
    - **Areas the receiver is interested in / has expertise in**
    - **Categorize receivers into groups**

    [Note: The above template is for your reference, you are free to submit your own data.]
    """
st.sidebar.markdown(_SIDEBAR_HELP)

st.markdown("---")  # Separator between progress tracker and current step
129
+
130
def _render_choice_question(idx, question):
    """Render one multiple-choice question as a multiselect with a 'Custom' option.

    Persists the selection into ``st.session_state.multiselect_answers[idx]`` and
    the flattened comma-joined form into ``st.session_state.answers[idx]``.
    Word-count style questions (detected from the question text) validate the
    custom entry as a positive integer; all other questions accept free text.
    """
    previous_selections = st.session_state.multiselect_answers.get(idx, [])

    # Always offer "Custom", and re-add any previously entered custom value so
    # the multiselect's defaults remain valid options.
    base_choices = question['choices'].copy()
    if "Custom" not in base_choices:
        base_choices.append("Custom")
    custom_values = [x for x in previous_selections
                     if x not in question['choices'] and x != "Custom"]
    all_choices = base_choices + custom_values

    # Heuristic: questions mentioning length/word count take numeric customs.
    is_numeric = any(word in question['question'].lower()
                     for word in ['word count', 'words', 'length'])

    selected_options = st.multiselect(
        question['question'],
        options=all_choices,
        default=previous_selections,
        key=f"multiselect_{idx}",
    )

    if "Custom" in selected_options:
        # Pre-fill with the previously entered custom value, if any.
        default_custom = next((x for x in previous_selections if x not in base_choices), "")
        label = "Enter custom word count:" if is_numeric else "Enter your custom response:"
        custom_value = st.text_input(label, value=default_custom, key=f"custom_{idx}")
        if custom_value:
            if is_numeric:
                try:
                    word_count = int(custom_value)
                    if word_count > 0:
                        # Replace the "Custom" placeholder with the typed value.
                        selected_options = [opt for opt in selected_options if opt != "Custom"]
                        if str(word_count) not in selected_options:
                            selected_options.append(str(word_count))
                    else:
                        st.error("Please enter a positive number")
                except ValueError:
                    st.error("Please enter a valid number")
            else:
                selected_options = [opt for opt in selected_options if opt != "Custom"]
                if custom_value not in selected_options:
                    selected_options.append(custom_value)

    st.session_state.multiselect_answers[idx] = selected_options
    st.session_state.answers[idx] = ", ".join(selected_options) if selected_options else ""


# ---------------------------------------------------------------------------
# Wizard flow: one branch per step, dispatched on session state.
# ---------------------------------------------------------------------------
if st.session_state.current_step == 'choose_method':
    st.subheader("Choose Your Prompt Creation Method")

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("""
        ### Guided Prompt Builder
        - Step-by-step assistance
        - AI-generated questions
        - Structured approach
        """)
        if st.button("Use Guided Builder"):
            st.session_state.prompt_creation_method = 'guided'
            st.session_state.current_step = 'initial_context'
            st.rerun()

    with col2:
        st.markdown("""
        ### Direct Prompt Entry
        - Write your own prompt
        - Complete control
        - Quick setup
        """)
        if st.button("Use Direct Entry"):
            st.session_state.prompt_creation_method = 'direct'
            st.session_state.current_step = 'direct_prompt'
            st.rerun()

elif st.session_state.current_step == 'direct_prompt':
    st.subheader("Enter Your Prompt")
    st.markdown(
        "Write your complete prompt for generating invitations. Include all necessary details and requirements.")

    direct_prompt = st.text_area(
        "Your Prompt:",
        value=st.session_state.direct_prompt,
        placeholder="Example: Generate a professional invitation for a product launch...",
        height=200
    )

    col1, col2 = st.columns([1, 5])
    with col1:
        if st.button("← Back"):
            st.session_state.current_step = 'choose_method'
            st.rerun()
    with col2:
        if st.button("Continue →"):
            if direct_prompt:
                # The direct prompt doubles as the final prompt.
                st.session_state.direct_prompt = direct_prompt
                st.session_state.final_prompt = direct_prompt
                st.session_state.current_step = 'upload_process'
                st.rerun()
            else:
                st.error("Please enter a prompt before continuing.")

elif st.session_state.prompt_creation_method == 'guided':
    if st.session_state.current_step == 'initial_context':
        st.subheader("Step 1: Provide Initial Context")
        st.markdown("Briefly describe what your invitation is about (e.g., 'Launching a new GPU product')")

        context = st.text_area(
            "Context:",
            value=st.session_state.context,
            placeholder="Example: Launching a new GPU product for AI and HPC applications",
            height=100
        )

        col1, col2 = st.columns([1, 5])
        with col1:
            if st.button("← Back"):
                st.session_state.current_step = 'choose_method'
                st.rerun()
        with col2:
            if st.button("Generate Questions →"):
                if context:
                    st.session_state.context = context
                    st.session_state.questions = llm_handler.generate_questions(context)
                    st.session_state.current_step = 'answer_questions'
                    st.rerun()
                else:
                    st.error("Please provide context before proceeding.")

    elif st.session_state.current_step == 'answer_questions':
        st.subheader("Step 2: Answer Questions")

        for i, question in enumerate(st.session_state.questions):
            if 'choices' in question:
                _render_choice_question(i, question)
            else:
                # Free-text questions are stored directly in the answers map.
                st.session_state.answers[i] = st.text_input(
                    question['question'],
                    value=st.session_state.answers.get(i, ""),
                    key=f"question_{i}"
                )

        col1, col2 = st.columns([1, 5])
        with col1:
            if st.button("← Back"):
                st.session_state.current_step = 'initial_context'
                st.rerun()
        with col2:
            if st.button("Generate Prompt →"):
                if all(st.session_state.answers.values()):
                    st.session_state.final_prompt = llm_handler.generate_final_prompt(
                        st.session_state.context,
                        st.session_state.questions,
                        st.session_state.answers
                    )
                    st.session_state.current_step = 'edit_prompt'
                    st.rerun()
                else:
                    st.error("Please answer all questions before proceeding.")

    elif st.session_state.current_step == 'edit_prompt':
        st.subheader("Step 3: Review and Edit Final Prompt")
        edited_prompt = st.text_area(
            "Edit your prompt if needed:",
            value=st.session_state.final_prompt,
            height=200
        )

        col1, col2 = st.columns([1, 5])
        with col1:
            if st.button("← Back"):
                st.session_state.current_step = 'answer_questions'
                st.rerun()
        with col2:
            if st.button("Continue to Upload →"):
                st.session_state.final_prompt = edited_prompt
                st.session_state.current_step = 'upload_process'
                st.rerun()
405
+
406
# Common upload and processing section for both paths
if st.session_state.current_step == 'upload_process':
    st.subheader("Upload and Process")
    uploaded_file = st.file_uploader("📂 Upload CSV File", type=["csv"])

    col1, col2 = st.columns([1, 5])
    with col1:
        if st.button("← Back"):
            # Return to whichever prompt-editing screen the user came from.
            if st.session_state.prompt_creation_method == 'guided':
                st.session_state.current_step = 'edit_prompt'
            else:
                st.session_state.current_step = 'direct_prompt'
            st.rerun()

    if uploaded_file is not None and st.session_state.final_prompt:
        st.write("⏳ Processing your file... Please wait.")
        processed_df = process_csv(uploaded_file, st.session_state.final_prompt)

        st.write("### Generated Invitations")
        st.dataframe(processed_df, use_container_width=True)

        # Serialize in memory instead of via a temp file: the previous approach
        # handed an open file object to download_button and then unlinked the
        # file while that handle was still live (leaks the handle; fails
        # outright on Windows). download_button accepts bytes directly.
        csv_bytes = processed_df.to_csv(index=False).encode("utf-8")
        st.download_button(
            label="📥 Download Results CSV",
            data=csv_bytes,
            file_name="generated_invitations.csv",
            mime="text/csv",
        )

# Reset button (moved to sidebar)
st.sidebar.markdown("---")
if st.sidebar.button("🔄 Start Over"):
    # Wipe every workflow key back to its initial value and restart the wizard.
    st.session_state.prompt_creation_method = None
    st.session_state.current_step = 'choose_method'
    st.session_state.context = ""
    st.session_state.questions = []
    st.session_state.answers = {}
    st.session_state.multiselect_answers = {}
    st.session_state.custom_options = {}
    st.session_state.final_prompt = ""
    st.session_state.direct_prompt = ""
    st.rerun()

st.markdown("---")
st.markdown("💡 **Tip:** Ensure your data aligns with the provided template for accurate results.")
groq_llms.py ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from langchain_groq import ChatGroq
3
+ from dotenv import load_dotenv
4
+
5
+ load_dotenv()
6
+
7
class LLMHandler:
    """Thin wrapper around a Groq-hosted chat model.

    Backs the app's three LLM calls:
      1. ``generate_questions``    -- clarifying questions from a short context.
      2. ``generate_final_prompt`` -- a reusable invitation prompt from the Q&A.
      3. ``generate_response``     -- one personalized invite per CSV row.
    """

    def __init__(self, model_name="llama-3.3-70b-versatile"):
        # Fail fast on a missing key instead of erroring on the first request.
        self.groq_api_key = os.getenv("GROQ_API_KEY")
        if not self.groq_api_key:
            raise ValueError("GROQ_API_KEY environment variable not set.")
        self.llm = ChatGroq(groq_api_key=self.groq_api_key, model_name=model_name)

    def generate_questions(self, context):
        """Return a list of question dicts derived from *context*.

        Each dict has a ``"question"`` key and, for multiple-choice questions,
        a ``"choices"`` list. If the model's reply cannot be parsed as the
        expected JSON array, a hard-coded default question set is returned so
        the guided flow never dead-ends.
        """
        prompt = f"""
        Based on this context about an invitation: "{context}"

        Generate questions to gather necessary information for creating a professional invitation prompt.

        Generate 8-12 focused questions. Include multiple choice options where appropriate.
        Questions should cover:
        1. Senders Company/Organization and role details
        2. Product/service specific details
        3. Key specifications or features
        4. Approximate length of the invite [Word count]
        5. What information from the receivers details do you want to include and influence in the invite
        6. Tone and style preferences
        7. Additional information which you would like to provide [Type N/A if you wish not to]
        8. Call to action [multiple choice] for example [ contact phone number, visit our website, visit our social media etc]
        9. In context to Call to action question, ask a followup question [Textual response] for CTA
        to collect the website link/ phone number/ social media handles etc.

        Return the questions in this exact JSON format:
        [
            {{"question": "Question 1", "choices": ["Choice 1", "Choice 2"]}},
            {{"question": "Question 2"}},
            {{"question": "Question 3", "choices": ["Choice 1", "Choice 2", "Choice 3"]}}
        ]

        For questions without multiple choice options, omit the 'choices' key.
        Make choices relevant but not exhaustive, as users will have option for custom responses.
        """

        # Fallback used whenever the LLM reply is missing or malformed.
        default_questions = [
            {
                "question": "What is your role in the company?",
                "choices": ["CEO", "CTO", "Director", "Product Manager"],
            },
            {"question": "What is your company name?"},
            {"question": "What is the name of your product/service?"},
            {"question": "What is the suggested Invite length[word count] you prefer?"},
            {"question": "What is the key technical specification or feature?"},
            {"question": "Can you explain in brief about what the invite is about?"},
            {
                "question": "Select the preferred tone for the invitation:",
                "choices": ["Professional", "Innovation-focused", "Casual", "Business & Strategic", "Friendly"],
            },
        ]

        try:
            response = self.llm.invoke(prompt)
            response_text = response.content.strip()

            # The model may wrap the JSON in prose; extract the outermost array.
            start_idx = response_text.find('[')
            end_idx = response_text.rfind(']') + 1
            if start_idx == -1 or end_idx == 0:
                raise ValueError("Could not find JSON array in response")

            import json
            questions = json.loads(response_text[start_idx:end_idx])

            # Validate the minimal schema the UI relies on.
            for question in questions:
                if 'question' not in question:
                    raise ValueError("Question missing 'question' field")
                if 'choices' in question and not isinstance(question['choices'], list):
                    raise ValueError("'choices' must be a list")

            return questions

        except Exception:
            print("Using default questions as fallback")
            return default_questions

    def generate_final_prompt(self, context, questions, answers):
        """Synthesize an invitation-generation prompt from *context* and the Q&A.

        Args:
            context: The user's initial free-text description.
            questions: List of question dicts (``"question"`` key required).
            answers: Mapping of question index -> answer string.

        Returns:
            The model-drafted prompt, stripped of surrounding whitespace.
        """
        # Flatten the Q&A into a readable transcript for the meta-prompt.
        formatted_answers = []
        for i, question in enumerate(questions):
            formatted_answers.append(f"Q: {question['question']}\nA: {answers[i]}")
        answers_text = "\n".join(formatted_answers)

        prompt = (
            f"Your task is to generate a professional prompt for invitation generation by using the below context and answers: \n"
            f" The initial context provided by user to generate the questions are [Context] :{context} and"
            f" The questions and answers provide detail information on how the prompt has to be designed [Answers]: {answers_text}. \n"
            f" Please follow the below instructions while drafting the prompt: \n"
            f" 1. Use the Complete Information in the context and answers. \n"
            f" 2. You Should draft best suitable prompt that can be used for generating personalized invites based on the information provided by user. \n"
            f" 3. Generate only the prompt and DO NOT include any statements like this in the beginning: \n"
            f" [Here is a professional prompt for invitation generation based on the provided context and answers] \n"
            f" The goal is by using this prompt, the user can obtain personalized invites to wide range of receivers work domain."
        )
        response = self.llm.invoke(prompt)
        return response.content.strip()

    def generate_response(self, user_prompt, data):
        """Generate one personalized invite for a single recipient row.

        Args:
            user_prompt: The (possibly LLM-drafted) invitation prompt.
            data: Recipient record as a dict (one CSV row).

        Returns:
            The invite text, stripped of surrounding whitespace.
        """
        # A CSV without a "Name" column must not crash the whole batch.
        recipient_name = data.get('Name', 'there')

        prompt = (
            f"You are a professional AI model tasked with writing personalized invite texts that are brochure-suitable "
            f"and tailored to the user's request and recipient details.\n\n"
            f"User Prompt: {user_prompt}\n"
            f"Recipient Details: {data}\n\n"
            f"**Instructions:**\n"
            f"1. Start the response with an appropriate salutation, such as 'Hello {recipient_name}' if available.\n"
            f"2. Match the tone specified in the user prompt. If no tone is mentioned, use a formal tone.\n"
            f"3. Write the invite within 90-100 words unless a specific length is provided.\n"
            f"4. Strictly adhere to all instructions and details given in the user prompt.\n\n"
            f"**Additional Guidelines:**\n"
            f"1. Tailor the invite to align with the recipient’s context and profession. For example:\n"
            f"   - If the recipient's information is unrelated to the context, provide a general formal invite highlighting key features.\n"
            f"   - If the recipient is closely related to the context (e.g., a GENAI engineer for an AI product), highlight specific benefits relevant to their needs.\n"
            f"2. Seamlessly incorporate recipient-specific details (e.g., Job Title, Industry) mentioned in user prompt only if they fit naturally into the invite.\n"
            f"3. Do not forcefully match the applications of the user product with the recipients information. \n"
            f"4. Avoid preambles, unnecessary symbols, or extraneous text.\n"
            f"5. Return the final invite text cleanly, in concise with no demeaning language.\n\n"
            f"**Goal:** Generate personalized invites suitable for a wide range of recipients while aligning with the product or service described in the user prompt."
        )

        response = self.llm.invoke(prompt)
        return response.content.strip()