# InviteAI / LLM_handler.py
import os

from langchain_community.chat_models import ChatOpenAI
from langchain_groq import ChatGroq
from dotenv import load_dotenv

load_dotenv()
class LLMHandler:
    def __init__(self, model_name="gpt-4", provider="openai"):
        self.provider = provider
        if provider == "openai":
            self.api_key = os.getenv("OPENAI_API_KEY")
            if not self.api_key:
                raise ValueError("OPENAI_API_KEY environment variable not set.")
            self.model = ChatOpenAI(api_key=self.api_key, model=model_name)
        elif provider == "groq":
            self.api_key = os.getenv("GROQ_API_KEY")
            if not self.api_key:
                raise ValueError("GROQ_API_KEY environment variable not set.")
            self.model = ChatGroq(api_key=self.api_key, model_name=model_name)
        else:
            raise ValueError("Unsupported provider. Use 'openai' or 'groq'.")

    def generate_text(self, input_data, user_instruction):
        """
        Generates personalized text using the LLM.
        """
        try:
            prompt = self._build_prompt(input_data, user_instruction)
            # Chat models accept a plain string via invoke() and return a message
            # object whose text lives on .content.
            response = self.model.invoke(prompt)
            return response.content
        except Exception as e:
            print(f"Error during text generation: {e}")
            return "Error generating response"

    @staticmethod
    def _build_prompt(input_data, user_instruction):
        """
        Builds a structured prompt for the LLM.
        """
        context = (
            f"Name: {input_data.get('Name')}\n"
            f"Job Title: {input_data.get('Job Title')}\n"
            f"Organization: {input_data.get('Organization', input_data.get('Organisation'))}\n"
            f"Area of Interest: {input_data.get('Area of Interest')}\n"
            f"Category: {input_data.get('Category')}\n"
        )
        return f"{user_instruction}\n\nContext:\n{context}"