malek-messaoudii committed
Commit e2251fd · 1 Parent(s): 870d2ba

refactor: Update GROQ_TOPIC_MODEL for consistency and adjust fallback models in TopicService to enhance model selection and robustness.

Files changed (2)
  1. config.py +1 -1
  2. services/topic_service.py +4 -4
config.py CHANGED
@@ -43,7 +43,7 @@ GROQ_TTS_FORMAT = "wav"
 GROQ_CHAT_MODEL = "llama3-70b-8192"
 
 # **Topic Extraction Model**
-GROQ_TOPIC_MODEL = "llama-3.1-70b-versatile"  # Alternative: "llama3-70b-8192" or "llama-3.1-8b-instant"
+GROQ_TOPIC_MODEL = "llama3-70b-8192"  # Using same model as chat for consistency
 
 # ============ SUPABASE ============
 SUPABASE_URL = os.getenv("SUPABASE_URL", "")
services/topic_service.py CHANGED
@@ -23,13 +23,13 @@ class TopicService:
     def __init__(self):
         self.llm = None
         # Use valid Groq model - defaults from config, fallback to common models
-        self.model_name = GROQ_TOPIC_MODEL if GROQ_TOPIC_MODEL else "llama-3.1-70b-versatile"
-        # Fallback models to try if primary fails
+        self.model_name = GROQ_TOPIC_MODEL if GROQ_TOPIC_MODEL else "llama3-70b-8192"
+        # Fallback models to try if primary fails (using current/available Groq models)
         self.fallback_models = [
-            "llama-3.1-70b-versatile",
             "llama3-70b-8192",
             "llama-3.1-8b-instant",
-            "mixtral-8x7b-32768"
+            "mixtral-8x7b-32768",
+            "gemma2-9b-it"  # Alternative smaller model
         ]
         self.initialized = False
 
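
For context, a minimal sketch (not part of this commit) of how a primary-plus-fallback model list like this might be consumed when building the Groq client. It assumes the service wraps langchain_groq's ChatGroq and uses a hypothetical build_llm helper; the actual initialization logic of TopicService is outside this diff.

# Hypothetical sketch, assuming langchain_groq and GROQ_API_KEY set in the environment.
from langchain_groq import ChatGroq

PRIMARY = "llama3-70b-8192"
FALLBACKS = ["llama-3.1-8b-instant", "mixtral-8x7b-32768", "gemma2-9b-it"]

def build_llm():
    """Return the first ChatGroq client that responds, or None if every model fails."""
    for model in [PRIMARY] + FALLBACKS:
        try:
            llm = ChatGroq(model=model, temperature=0)
            llm.invoke("ping")  # cheap round trip to confirm the model is still served
            return llm
        except Exception:
            continue  # model decommissioned or unavailable, try the next one
    return None

Trying the primary model first and only then walking the fallback list keeps behavior consistent with GROQ_CHAT_MODEL while still surviving a decommissioned model name.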