# models_config.py
# UPDATED: Novita AI API only - no local models
LLM_CONFIG = {
    "primary_provider": "novita_api",
    "models": {
        "reasoning_primary": {
            "model_id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B:de-1a706eeafbf3ebc2",
            "task": "general_reasoning",
            "max_tokens": 4096,
            "temperature": 0.6,  # Recommended for DeepSeek-R1
            "top_p": 0.95,
            "force_reasoning_prefix": True,
            "is_chat_model": True
        },
        "classification_specialist": {
            "model_id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B:de-1a706eeafbf3ebc2",
            "task": "intent_classification",
            "max_tokens": 512,
            "temperature": 0.5,  # Lower for consistency
            "top_p": 0.9,
            "force_reasoning_prefix": False,
            "is_chat_model": True
        },
        "safety_checker": {
            "model_id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B:de-1a706eeafbf3ebc2",
            "task": "content_moderation",
            "max_tokens": 1024,
            "temperature": 0.5,
            "top_p": 0.9,
            "force_reasoning_prefix": False,
            "is_chat_model": True
        },
        "embedding_specialist": {
            "model_id": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B:de-1a706eeafbf3ebc2",
            "task": "embeddings",
            "note": "Embeddings via Novita API - may require special handling",
            "is_chat_model": True
        }
    },
    "routing_logic": {
        "strategy": "novita_api_only",
        "fallback_chain": [],
        "load_balancing": "single_endpoint"
    }
}
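

# --- Usage sketch (illustrative only) ---
# A minimal sketch of how LLM_CONFIG might be consumed. It assumes Novita AI
# exposes an OpenAI-compatible chat completions endpoint and that the API key
# is provided via the NOVITA_API_KEY environment variable; the endpoint URL,
# the environment variable name, and the get_model_config helper are
# assumptions for illustration, not part of the original configuration.

import os
import requests


def get_model_config(task: str) -> dict:
    """Return the first model entry whose 'task' matches, else the primary model."""
    for _name, cfg in LLM_CONFIG["models"].items():
        if cfg.get("task") == task:
            return cfg
    return LLM_CONFIG["models"]["reasoning_primary"]


def chat_completion(prompt: str, task: str = "general_reasoning") -> str:
    """Send a single-turn chat request using the settings from LLM_CONFIG."""
    cfg = get_model_config(task)
    response = requests.post(
        "https://api.novita.ai/v3/openai/chat/completions",  # assumed endpoint
        headers={"Authorization": f"Bearer {os.environ['NOVITA_API_KEY']}"},
        json={
            "model": cfg["model_id"],
            "messages": [{"role": "user", "content": prompt}],
            "max_tokens": cfg.get("max_tokens", 1024),
            "temperature": cfg.get("temperature", 0.6),
            "top_p": cfg.get("top_p", 0.95),
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]


if __name__ == "__main__":
    # Example: route an intent-classification prompt through the shared model.
    print(chat_completion("Classify the intent: 'cancel my order'", task="intent_classification"))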