Compare commits


3 Commits

Author       SHA1        Message                                                     Date
ItzCrazyKns  6e1e5dd185  Merge branch 'master' into feat/custom-openai-temperature  2025-03-08 01:04:35 +05:30
VinceOPS     98c185c12e  feat(custom-openai): temperature                            2025-03-03 22:37:23 +01:00
VinceOPS     446adbef3f  fix: format code                                            2025-03-03 22:36:47 +01:00
11 changed files with 58 additions and 7 deletions

View File

@@ -18,6 +18,7 @@ API_KEY = ""
[MODELS.CUSTOM_OPENAI]
API_KEY = ""
API_URL = ""
+TEMPERATURE = 0.7
[MODELS.OLLAMA]
API_URL = "" # Ollama API URL - http://host.docker.internal:11434

View File

@@ -30,6 +30,7 @@ interface Config {
API_URL: string;
API_KEY: string;
MODEL_NAME: string;
+TEMPERATURE: number;
};
};
API_ENDPOINTS: {
@@ -75,6 +76,9 @@ export const getCustomOpenaiApiUrl = () =>
export const getCustomOpenaiModelName = () =>
loadConfig().MODELS.CUSTOM_OPENAI.MODEL_NAME;
+export const getCustomOpenaiTemperature = () =>
+loadConfig().MODELS.CUSTOM_OPENAI.TEMPERATURE;
const mergeConfigs = (current: any, update: any): any => {
if (update === null || update === undefined) {
return current;
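
The new getter returns TEMPERATURE exactly as it appears in the loaded config, so a config.toml written before this change (which has no TEMPERATURE key) yields undefined. A defensive variant is sketched below; it is not part of this diff, the helper name is hypothetical, and the 0.7 fallback simply mirrors the value that was previously hard-coded at each ChatOpenAI call site.

```ts
// Hypothetical fallback getter (not in this PR). Assumes the same
// loadConfig() used by the getters above in the config module.
export const getCustomOpenaiTemperatureOrDefault = (): number => {
  const temperature = loadConfig().MODELS.CUSTOM_OPENAI.TEMPERATURE;
  return typeof temperature === 'number' && !Number.isNaN(temperature)
    ? temperature
    : 0.7; // previous hard-coded default, also used in sample.config.toml
};
```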

View File

@@ -8,6 +8,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../../config';
import { ChatOpenAI } from '@langchain/openai';
@@ -39,6 +40,7 @@ export const getAvailableChatModelProviders = async () => {
const customOpenAiApiKey = getCustomOpenaiApiKey();
const customOpenAiApiUrl = getCustomOpenaiApiUrl();
const customOpenAiModelName = getCustomOpenaiModelName();
+const customOpenAiTemperature = getCustomOpenaiTemperature();
models['custom_openai'] = {
...(customOpenAiApiKey && customOpenAiApiUrl && customOpenAiModelName
@@ -48,7 +50,7 @@ export const getAvailableChatModelProviders = async () => {
model: new ChatOpenAI({
openAIApiKey: customOpenAiApiKey,
modelName: customOpenAiModelName,
-temperature: 0.7,
+temperature: customOpenAiTemperature,
configuration: {
baseURL: customOpenAiApiUrl,
},

View File

@@ -13,6 +13,7 @@ import {
getCustomOpenaiApiUrl,
getCustomOpenaiApiKey,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../config';
import logger from '../utils/logger';
@@ -60,6 +61,7 @@ router.get('/', async (_, res) => {
config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl();
config['customOpenaiApiKey'] = getCustomOpenaiApiKey();
config['customOpenaiModelName'] = getCustomOpenaiModelName();
+config['customOpenaiTemperature'] = getCustomOpenaiTemperature();
res.status(200).json(config);
} catch (err: any) {
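
With this addition, the config GET handler returns the temperature alongside the existing custom-OpenAI URL, key, and model name fields. A minimal client-side read is sketched below; the field names come from this diff, while the backend origin and the /api/config mount path are assumptions (the diff only shows the router-relative '/').

```ts
// Sketch: reading the new field from the config endpoint.
// The origin and path below are assumptions, not taken from this diff.
const res = await fetch('http://localhost:3001/api/config');
const config = (await res.json()) as {
  customOpenaiApiUrl: string;
  customOpenaiApiKey: string;
  customOpenaiModelName: string;
  customOpenaiTemperature: number;
};
console.log(config.customOpenaiTemperature); // 0.7 with the sample config
```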

View File

@@ -9,6 +9,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -50,7 +51,7 @@ router.post('/', async (req, res) => {
llm = new ChatOpenAI({
modelName: getCustomOpenaiModelName(),
openAIApiKey: getCustomOpenaiApiKey(),
-temperature: 0.7,
+temperature: getCustomOpenaiTemperature(),
configuration: {
baseURL: getCustomOpenaiApiUrl(),
},

View File

@@ -14,6 +14,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -23,6 +24,7 @@ interface chatModel {
model: string;
customOpenAIKey?: string;
customOpenAIBaseURL?: string;
+customOpenAITemperature?: number;
}
interface embeddingModel {
@@ -87,7 +89,9 @@ router.post('/', async (req, res) => {
modelName: body.chatModel?.model || getCustomOpenaiModelName(),
openAIApiKey:
body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(),
-temperature: 0.7,
+temperature:
+body.chatModel?.customOpenAITemperature ||
+getCustomOpenaiTemperature(),
configuration: {
baseURL:
body.chatModel?.customOpenAIBaseURL || getCustomOpenaiApiUrl(),
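
One edge case in the fallback above: with ||, a request that explicitly sets customOpenAITemperature to 0 is treated as falsy and silently replaced by the config value (for the key and base URL the same pattern is harmless, since empty strings should indeed fall back). A nullish-coalescing variant, sketched with the names used in this diff, preserves an explicit 0:

```ts
// Sketch: ?? falls back only when the request omits the field, so an
// explicit temperature of 0 is kept. `body` and the config getter are
// assumed from the surrounding route handler.
const temperature =
  body.chatModel?.customOpenAITemperature ?? getCustomOpenaiTemperature();
```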

View File

@@ -9,6 +9,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -49,7 +50,7 @@ router.post('/', async (req, res) => {
llm = new ChatOpenAI({
modelName: getCustomOpenaiModelName(),
openAIApiKey: getCustomOpenaiApiKey(),
-temperature: 0.7,
+temperature: getCustomOpenaiTemperature(),
configuration: {
baseURL: getCustomOpenaiApiUrl(),
},

View File

@@ -9,6 +9,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../config';
const router = express.Router();
@@ -50,7 +51,7 @@ router.post('/', async (req, res) => {
llm = new ChatOpenAI({
modelName: getCustomOpenaiModelName(),
openAIApiKey: getCustomOpenaiApiKey(),
-temperature: 0.7,
+temperature: getCustomOpenaiTemperature(),
configuration: {
baseURL: getCustomOpenaiApiUrl(),
},

View File

@@ -13,6 +13,7 @@ import {
getCustomOpenaiApiKey,
getCustomOpenaiApiUrl,
getCustomOpenaiModelName,
+getCustomOpenaiTemperature,
} from '../config';
export const handleConnection = async (
@@ -56,12 +57,13 @@ export const handleConnection = async (
const customOpenaiApiKey = getCustomOpenaiApiKey();
const customOpenaiApiUrl = getCustomOpenaiApiUrl();
const customOpenaiModelName = getCustomOpenaiModelName();
+const customOpenaiTemperature = getCustomOpenaiTemperature();
if (customOpenaiApiKey && customOpenaiApiUrl && customOpenaiModelName) {
llm = new ChatOpenAI({
modelName: customOpenaiModelName,
openAIApiKey: customOpenaiApiKey,
-temperature: 0.7,
+temperature: customOpenaiTemperature,
configuration: {
baseURL: customOpenaiApiUrl,
},

View File

@@ -23,6 +23,7 @@ interface SettingsType {
customOpenaiApiKey: string;
customOpenaiApiUrl: string;
customOpenaiModelName: string;
+customOpenaiTemperature: number;
}
interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
@@ -576,6 +577,32 @@ const Page = () => {
}
/>
</div>
+<div className="flex flex-col space-y-1">
+<p className="text-black/70 dark:text-white/70 text-sm">
+Temperature
+</p>
+<Input
+type="number"
+min={0}
+step={0.1}
+max={1}
+placeholder="Temperature"
+value={config.customOpenaiTemperature}
+isSaving={savingStates['customOpenaiTemperature']}
+onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
+setConfig((prev) => ({
+...prev!,
+customOpenaiTemperature: parseInt(
+e.target.value,
+10,
+),
+}));
+}}
+onSave={(value) =>
+saveConfig('customOpenaiTemperature', value)
+}
+/>
+</div>
<div className="flex flex-col space-y-1">
<p className="text-black/70 dark:text-white/70 text-sm">
Custom OpenAI API Key
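
Two details of the new Temperature input are worth noting: parseInt truncates fractional values, so typing 0.7 stores 0, and nothing constrains typed values to the 0–1 range the input advertises. A parsing helper such a handler could call instead is sketched below; it is hypothetical and not part of this diff.

```ts
// Hypothetical helper (not in this PR): parse the input with parseFloat,
// fall back to 0.7 on invalid input, and clamp to the 0–1 range that the
// settings input advertises via its min/max props.
const parseTemperature = (raw: string, fallback = 0.7): number => {
  const parsed = parseFloat(raw);
  if (Number.isNaN(parsed)) return fallback;
  return Math.min(1, Math.max(0, parsed));
};

// e.g. parseTemperature('0.7') === 0.7, parseTemperature('5') === 1
```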

View File

@@ -68,7 +68,13 @@ const MessageBox = ({
return (
<div>
{message.role === 'user' && (
-<div className={cn('w-full', messageIndex === 0 ? 'pt-16' : 'pt-8', 'break-words')}>
+<div
+className={cn(
+'w-full',
+messageIndex === 0 ? 'pt-16' : 'pt-8',
+'break-words',
+)}
+>
<h2 className="text-black dark:text-white font-medium text-3xl lg:w-9/12">
{message.content}
</h2>