# Routing configuration for a multi-model LLM setup on Hugging Face.
# "models" maps a role name to one specialized model plus the metadata
# the router uses (cost, latency targets, fallbacks); "routing_logic"
# describes how requests are dispatched across them.
LLM_CONFIG = {
    "primary_provider": "huggingface",
    "models": {
        # General-purpose instruction model; first choice for open-ended tasks.
        "reasoning_primary": {
            "model_id": "mistralai/Mistral-7B-Instruct-v0.2",
            "task": "general_reasoning",
            "max_tokens": 2000,
            "temperature": 0.7,
            "cost_per_token": 0.000015,
            # Model id used when the primary is unavailable.
            "fallback": "meta-llama/Llama-2-7b-chat-hf",
        },
        # Lightweight sentence-embedding model for similarity search.
        "embedding_specialist": {
            "model_id": "sentence-transformers/all-MiniLM-L6-v2",
            "task": "embeddings",
            "vector_dimensions": 384,
            "purpose": "semantic_similarity",
            "cost_advantage": "90%_cheaper_than_primary",
        },
        # Small classifier tuned for low-latency intent detection.
        "classification_specialist": {
            "model_id": "cardiffnlp/twitter-roberta-base-emotion",
            "task": "intent_classification",
            "max_length": 512,
            "specialization": "fast_inference",
            "latency_target": "<100ms",
        },
        # Content-moderation model; flags toxic/biased output above threshold.
        "safety_checker": {
            "model_id": "unitary/unbiased-toxic-roberta",
            "task": "content_moderation",
            "confidence_threshold": 0.85,
            "purpose": "bias_detection",
        },
    },
    # How requests are dispatched across the models above.
    "routing_logic": {
        "strategy": "task_based_routing",
        # Ordered degradation path when a model call fails.
        "fallback_chain": ["primary", "fallback", "degraded_mode"],
        "load_balancing": "round_robin_with_health_check",
    },
}