From 98c185c12e7823c73dbb29becbecedb0868abbfa Mon Sep 17 00:00:00 2001
From: VinceOPS <10739306+VinceOPS@users.noreply.github.com>
Date: Mon, 3 Mar 2025 22:37:23 +0100
Subject: [PATCH 2/2] feat(custom-openai): temperature
---
sample.config.toml | 1 +
src/config.ts | 4 ++++
src/lib/providers/index.ts | 4 +++-
src/routes/config.ts | 2 ++
src/routes/images.ts | 3 ++-
src/routes/search.ts | 6 +++++-
src/routes/suggestions.ts | 3 ++-
src/routes/videos.ts | 3 ++-
src/websocket/connectionManager.ts | 4 +++-
ui/app/settings/page.tsx | 27 +++++++++++++++++++++++++++
10 files changed, 51 insertions(+), 6 deletions(-)
diff --git a/sample.config.toml b/sample.config.toml
index 7b09d67..ad09a4a 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -18,6 +18,7 @@ API_KEY = ""
[MODELS.CUSTOM_OPENAI]
API_KEY = ""
API_URL = ""
+TEMPERATURE = 0.7
[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434
diff --git a/src/config.ts b/src/config.ts
index ab2a5db..21c6cb6 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -30,6 +30,7 @@ interface Config {
API_URL: string;
API_KEY: string;
MODEL_NAME: string;
+ TEMPERATURE: number;
};
};
API_ENDPOINTS: {
@@ -75,6 +76,9 @@ export const getCustomOpenaiApiUrl = () =>
export const getCustomOpenaiModelName = () =>
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
+export const getCustomOpenaiTemperature = () =>
+ loadConfig().MODELS.CUSTOM_OPENAI.TEMPERATURE;
+
const mergeConfigs = (current: any, update: any): any => {
if (update === null || update === undefined) {
return current;
diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts
index 57e9185..51f587b 100644
--- a/src/lib/providers/index.ts
+++ b/src/lib/providers/index.ts
@@ -8,6 +8,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../../config';
import { ChatOpenAI } from '@langchain/openai';
@@ -39,6 +40,7 @@ export const getAvailableChatModelProviders = async () => {
const customOpenAiApiKey = getCustomOpenaiApiKey();
const customOpenAiApiUrl = getCustomOpenaiApiUrl();
const customOpenAiModelName = getCustomOpenaiModelName();
+ const customOpenAiTemperature = getCustomOpenaiTemperature();
models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
@@ -48,7 +50,7 @@ export const getAvailableChatModelProviders = async () => {
model: new ChatOpenAI({
openAIApiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
- temperature: 0.7,
+ temperature: customOpenAiTemperature,
configuration: {
baseURL: customOpenAiApiUrl,
},
diff --git a/src/routes/config.ts b/src/routes/config.ts
index 18b370d..0a24fdf 100644
--- a/src/routes/config.ts
+++ b/src/routes/config.ts
@@ -13,6 +13,7 @@ import {
getCustomOpenaiApiUrl,
getCustomOpenaiApiKey,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../config';
import logger from '../utils/logger';
@@ -60,6 +61,7 @@ router.get('/', async (_, res) => {
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
+ config['customOpenaiTemperature'] = getCustomOpenaiTemperature();
res.status(200).json(config);
} catch (err: any) {
diff --git a/src/routes/images.ts b/src/routes/images.ts
index 5671657..1a96494 100644
--- a/src/routes/images.ts
+++ b/src/routes/images.ts
@@ -9,6 +9,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -50,7 +51,7 @@ router.post('/', async (req, res) => {
llm = new ChatOpenAI({
modelName: getCustomOpenaiModelName(),
openAIApiKey: getCustomOpenaiApiKey(),
- temperature: 0.7,
+ temperature: getCustomOpenaiTemperature(),
configuration: {
baseURL: getCustomOpenaiApiUrl(),
},
diff --git a/src/routes/search.ts b/src/routes/search.ts
index 57d90a3..78a58fc 100644
--- a/src/routes/search.ts
+++ b/src/routes/search.ts
@@ -14,6 +14,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -23,6 +24,7 @@ interface chatModel {
model: string;
customOpenAIKey?: string;
customOpenAIBaseURL?: string;
+ customOpenAITemperature?: number;
}
interface embeddingModel {
@@ -87,7 +89,9 @@ router.post('/', async (req, res) => {
modelName: body.chatModel?.model || getCustomOpenaiModelName(),
openAIApiKey:
body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
- temperature: 0.7,
+          temperature:
+            body.chatModel?.customOpenAITemperature ??
+            getCustomOpenaiTemperature(),
configuration: {
baseURL:
body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
diff --git a/src/routes/suggestions.ts b/src/routes/suggestions.ts
index 7dd1739..4f2791e 100644
--- a/src/routes/suggestions.ts
+++ b/src/routes/suggestions.ts
@@ -9,6 +9,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -49,7 +50,7 @@ router.post('/', async (req, res) => {
llm = new ChatOpenAI({
modelName: getCustomOpenaiModelName(),
openAIApiKey: getCustomOpenaiApiKey(),
- temperature: 0.7,
+ temperature: getCustomOpenaiTemperature(),
configuration: {
baseURL: getCustomOpenaiApiUrl(),
},
diff --git a/src/routes/videos.ts b/src/routes/videos.ts
index b631f26..f64b37e 100644
--- a/src/routes/videos.ts
+++ b/src/routes/videos.ts
@@ -9,6 +9,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -50,7 +51,7 @@ router.post('/', async (req, res) => {
llm = new ChatOpenAI({
modelName: getCustomOpenaiModelName(),
openAIApiKey: getCustomOpenaiApiKey(),
- temperature: 0.7,
+ temperature: getCustomOpenaiTemperature(),
configuration: {
baseURL: getCustomOpenaiApiUrl(),
},
diff --git a/src/websocket/connectionManager.ts b/src/websocket/connectionManager.ts
index bb8f242..36f3f96 100644
--- a/src/websocket/connectionManager.ts
+++ b/src/websocket/connectionManager.ts
@@ -13,6 +13,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+ getCustomOpenaiTemperature,
} from '../config';
export const handleConnection = async (
@@ -56,12 +57,13 @@ export const handleConnection = async (
const customOpenaiApiKey = getCustomOpenaiApiKey();
const customOpenaiApiUrl = getCustomOpenaiApiUrl();
const customOpenaiModelName = getCustomOpenaiModelName();
+ const customOpenaiTemperature = getCustomOpenaiTemperature();
if (customOpenaiApiKey && customOpenaiApiUrl && customOpenaiModelName) {
llm = new ChatOpenAI({
modelName: customOpenaiModelName,
openAIApiKey: customOpenaiApiKey,
- temperature: 0.7,
+ temperature: customOpenaiTemperature,
configuration: {
baseURL: customOpenaiApiUrl,
},
diff --git a/ui/app/settings/page.tsx b/ui/app/settings/page.tsx
index 6aff1b0..9bdcb04 100644
--- a/ui/app/settings/page.tsx
+++ b/ui/app/settings/page.tsx
@@ -23,6 +23,7 @@ interface SettingsType {
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
+ customOpenaiTemperature: number;
}
interface InputProps extends React.InputHTMLAttributes {
@@ -576,6 +577,32 @@ const Page = () => {
}
/>
+
+
+ Temperature
+
+
) => {
+ setConfig((prev) => ({
+ ...prev!,
+                    customOpenaiTemperature:
+                      parseFloat(
+                        e.target.value,
+                      ),
+ }));
+ }}
+ onSave={(value) =>
+ saveConfig('customOpenaiTemperature', value)
+ }
+ />
+