feat: integrate Google Gemini with dynamic model fetching and UI support
- Added Gemini AI model integration with automatic model list fetching
- Updated the configuration system to support Gemini API settings
- Enhanced SettingsDialog with Gemini model selection options
- Updated dependencies in package.json for Gemini support
- Improved provider configuration in index.ts for better model management
- Added a Gemini API key field to the sample configuration template
- Updated the route configuration for Gemini support
commit 3dbc358ee3
parent c650d1c3d9
7 changed files with 157 additions and 12 deletions
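The first bullet in the message refers to fetching the Gemini model list at runtime rather than hard-coding it. The provider file that does this is not among the hunks captured below, so the following is only a rough sketch of what such a loader might look like, assuming the public Generative Language API model-list endpoint; the helper name loadGeminiChatModels and the returned shape are illustrative, not taken from this commit.

// Sketch only — not code from this commit. Assumes the public
// Generative Language API model-list endpoint; loadGeminiChatModels
// and the returned { id, label } shape are hypothetical.
interface GeminiModel {
  name: string; // e.g. "models/gemini-pro"
  displayName: string;
  supportedGenerationMethods: string[];
}

export const loadGeminiChatModels = async (apiKey: string) => {
  const res = await fetch(
    `https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`,
  );

  if (!res.ok) {
    throw new Error(`Gemini model list request failed with status ${res.status}`);
  }

  const { models } = (await res.json()) as { models: GeminiModel[] };

  // Keep only models that support chat-style generation.
  return models
    .filter((m) => m.supportedGenerationMethods?.includes('generateContent'))
    .map((m) => ({
      id: m.name.replace('models/', ''),
      label: m.displayName,
    }));
};

A loader along these lines is what would let the SettingsDialog populate its Gemini model dropdown without shipping a static list.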
@@ -7,6 +7,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getAnthropicApiKey,
+  getGeminiApiKey,
   getOpenaiApiKey,
   updateConfig,
 } from '../config';
@@ -52,7 +53,7 @@ router.get('/', async (_, res) => {
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
-
+    config['geminiApiKey'] = getGeminiApiKey();
     res.status(200).json(config);
   } catch (err: any) {
     res.status(500).json({ message: 'An error has occurred.' });
@@ -68,6 +69,7 @@ router.post('/', async (req, res) => {
       OPENAI: config.openaiApiKey,
       GROQ: config.groqApiKey,
       ANTHROPIC: config.anthropicApiKey,
+      Gemini: config.geminiApiKey,
     },
     API_ENDPOINTS: {
       OLLAMA: config.ollamaApiUrl,
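Only the config route hunks from the 7-file changeset appear in this capture; the getGeminiApiKey accessor imported from '../config' above is defined in one of the files not shown. Purely as an illustration, assuming the project keeps its keys in a TOML file whose API_KEYS table mirrors the key names used in the POST handler (the config.toml path, the loadConfig helper, and the @iarna/toml choice are all assumptions here), the accessor could be as small as:

// Sketch only — the real '../config' module is not part of this capture.
// Assumes a config.toml parsed with @iarna/toml whose API_KEYS table uses
// the same key names as the POST handler above (OPENAI, GROQ, ANTHROPIC, Gemini).
import fs from 'fs';
import path from 'path';
import toml from '@iarna/toml';

type Config = {
  API_KEYS: {
    OPENAI?: string;
    GROQ?: string;
    ANTHROPIC?: string;
    Gemini?: string;
  };
  API_ENDPOINTS: {
    OLLAMA?: string;
  };
};

const loadConfig = () =>
  toml.parse(
    fs.readFileSync(path.join(process.cwd(), 'config.toml'), 'utf-8'),
  ) as unknown as Config;

export const getGeminiApiKey = () => loadConfig().API_KEYS.Gemini;

Under that assumption, the sample configuration template mentioned in the commit message would only need a matching Gemini entry in its API_KEYS section.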