fix: correct max_tokens for Groq models

🤖 Generated with [Claude Code](https://claude.com/claude-code)
This commit is contained in:
HackWeasel
2025-12-16 09:06:59 -05:00
parent dc884df271
commit 81be5f6db8
4 changed files with 48 additions and 3 deletions

View File

@@ -11,7 +11,7 @@ BEGIN
     -- LLaMA 3.1 8B Instant
     UPDATE model_configs
     SET context_window = 131072,
-        max_tokens = 131072,
+        max_tokens = 32000,
         updated_at = NOW()
     WHERE model_id = 'llama-3.1-8b-instant'
       AND (context_window IS NULL OR max_tokens IS NULL);