LLM Models

Access 100+ language models through a single unified API. GPT-4o, Claude 3.5, Gemini 1.5, Llama 3.1, and more.

// Catalog of available models. Fields: display name, provider id (used by the
// provider filter), context window in tokens, price in USD per million input
// tokens, tier label, and a short description (searched along with the name).
const models = [
  { name: 'GPT-4o', provider: 'openai', context: 128000, price: 2.5, tier: 'flagship', desc: 'Latest GPT-4 with vision' },
  { name: 'GPT-4o-mini', provider: 'openai', context: 128000, price: 0.15, tier: 'balanced', desc: 'Fast, affordable GPT-4' },
  { name: 'GPT-4 Turbo', provider: 'openai', context: 128000, price: 10, tier: 'flagship', desc: 'Previous flagship model' },
  { name: 'Claude 3.5 Sonnet', provider: 'anthropic', context: 200000, price: 3, tier: 'flagship', desc: 'Anthropic\'s best model' },
  { name: 'Claude 3 Haiku', provider: 'anthropic', context: 200000, price: 0.25, tier: 'balanced', desc: 'Fast, efficient model' },
  { name: 'Claude 3 Opus', provider: 'anthropic', context: 200000, price: 15, tier: 'flagship', desc: 'Most capable model' },
  { name: 'Gemini 1.5 Pro', provider: 'google', context: 1000000, price: 1.25, tier: 'flagship', desc: '1M token context' },
  { name: 'Gemini 1.5 Flash', provider: 'google', context: 1000000, price: 0.075, tier: 'balanced', desc: 'Fast, cheap model' },
  { name: 'Llama 3.1 70B', provider: 'meta', context: 128000, price: 0.9, tier: 'flagship', desc: 'Open source flagship' },
  { name: 'Llama 3.1 8B', provider: 'meta', context: 128000, price: 0.2, tier: 'balanced', desc: 'Lightweight open model' },
  { name: 'Mistral Large', provider: 'mistral', context: 128000, price: 2, tier: 'flagship', desc: 'European flagship' },
  { name: 'Mistral Nemo', provider: 'mistral', context: 128000, price: 0.15, tier: 'balanced', desc: 'Balanced performance' },
];

/**
 * Render the given model list into the #modelsGrid element.
 * Replaces the grid's contents entirely on each call.
 * @param {Array<object>} filtered - models to display; defaults to the full catalog.
 */
function renderModels(filtered = models) {
  const grid = document.getElementById('modelsGrid');
  // NOTE: the data here is static/trusted; if model data ever becomes
  // user-supplied, switch from innerHTML to textContent or sanitize it.
  grid.innerHTML = filtered.map(m => `
${m.provider} ${m.tier}

${m.name}

${m.desc}

${(m.context/1000).toLocaleString()}K ctx $${m.price}/M
`).join('');
}

// BUG FIX: in the original, the listener registrations and the trailing
// renderModels() call sat inside renderModels itself (the function was never
// closed after .join('')). That re-attached both listeners on every render
// (duplicate handlers) and made the final renderModels() call recurse.
// Wire the controls ONCE at top level instead.

// Free-text search: matches (case-insensitively) against name or description.
document.getElementById('searchInput').addEventListener('input', (e) => {
  const q = e.target.value.toLowerCase();
  renderModels(models.filter(m =>
    m.name.toLowerCase().includes(q) || m.desc.toLowerCase().includes(q)));
});

// Provider dropdown: empty value means "all providers".
document.getElementById('providerFilter').addEventListener('change', (e) => {
  const p = e.target.value;
  renderModels(p ? models.filter(m => m.provider === p) : models);
});

// Initial render of the full catalog.
renderModels();