Mirror of https://github.com/ItzCrazyKns/Perplexica.git (synced 2025-06-15 06:18:41 +00:00)

Compare commits: 701819d018...bb21184ea2 (2 commits)

- bb21184ea2
- 0c3740fdf2
First changed file — a React page component:

```diff
@@ -143,7 +143,7 @@ const Page = () => {
   const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
     string | null
   >(null);
-  const [isLoading, setIsLoading] = useState(false);
+  const [isLoading, setIsLoading] = useState(true);
   const [automaticImageSearch, setAutomaticImageSearch] = useState(false);
   const [automaticVideoSearch, setAutomaticVideoSearch] = useState(false);
   const [systemInstructions, setSystemInstructions] = useState<string>('');
@@ -151,7 +151,6 @@ const Page = () => {
 
   useEffect(() => {
     const fetchConfig = async () => {
-      setIsLoading(true);
      const res = await fetch(`/api/config`, {
        headers: {
          'Content-Type': 'application/json',
```
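Taken together, the two hunks above change when the loading state is set: `isLoading` now starts as `true`, so the page shows its loading UI from the very first render, and the explicit `setIsLoading(true)` inside `fetchConfig` is no longer needed. Below is a minimal sketch of the resulting pattern; the config shape, the rendered markup, and everything not shown in the diff are invented for illustration.

```tsx
import { useEffect, useState } from 'react';

const Page = () => {
  // Start in the loading state so the first render shows a loading UI,
  // not an empty form that pops in once the fetch finishes.
  const [isLoading, setIsLoading] = useState(true);
  const [config, setConfig] = useState<Record<string, any> | null>(null);

  useEffect(() => {
    const fetchConfig = async () => {
      // No setIsLoading(true) needed here anymore: the initial state covers it.
      const res = await fetch(`/api/config`, {
        headers: { 'Content-Type': 'application/json' },
      });
      setConfig(await res.json());
      setIsLoading(false);
    };

    fetchConfig();
  }, []);

  if (isLoading) return <div>Loading…</div>;
  return <pre>{JSON.stringify(config, null, 2)}</pre>;
};

export default Page;
```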
Second changed file — the Groq chat-model provider:

```diff
@@ -6,101 +6,31 @@ export const PROVIDER_INFO = {
   key: 'groq',
   displayName: 'Groq',
 };
-import { BaseChatModel } from '@langchain/core/language_models/chat_models';
-
-const groqChatModels: Record<string, string>[] = [
-  {
-    displayName: 'Gemma2 9B IT',
-    key: 'gemma2-9b-it',
-  },
-  {
-    displayName: 'Llama 3.3 70B Versatile',
-    key: 'llama-3.3-70b-versatile',
-  },
-  {
-    displayName: 'Llama 3.1 8B Instant',
-    key: 'llama-3.1-8b-instant',
-  },
-  {
-    displayName: 'Llama3 70B 8192',
-    key: 'llama3-70b-8192',
-  },
-  {
-    displayName: 'Llama3 8B 8192',
-    key: 'llama3-8b-8192',
-  },
-  {
-    displayName: 'Mixtral 8x7B 32768',
-    key: 'mixtral-8x7b-32768',
-  },
-  {
-    displayName: 'Qwen QWQ 32B (Preview)',
-    key: 'qwen-qwq-32b',
-  },
-  {
-    displayName: 'Mistral Saba 24B (Preview)',
-    key: 'mistral-saba-24b',
-  },
-  {
-    displayName: 'Qwen 2.5 Coder 32B (Preview)',
-    key: 'qwen-2.5-coder-32b',
-  },
-  {
-    displayName: 'Qwen 2.5 32B (Preview)',
-    key: 'qwen-2.5-32b',
-  },
-  {
-    displayName: 'DeepSeek R1 Distill Qwen 32B (Preview)',
-    key: 'deepseek-r1-distill-qwen-32b',
-  },
-  {
-    displayName: 'DeepSeek R1 Distill Llama 70B (Preview)',
-    key: 'deepseek-r1-distill-llama-70b',
-  },
-  {
-    displayName: 'Llama 3.3 70B SpecDec (Preview)',
-    key: 'llama-3.3-70b-specdec',
-  },
-  {
-    displayName: 'Llama 3.2 1B Preview (Preview)',
-    key: 'llama-3.2-1b-preview',
-  },
-  {
-    displayName: 'Llama 3.2 3B Preview (Preview)',
-    key: 'llama-3.2-3b-preview',
-  },
-  {
-    displayName: 'Llama 3.2 11B Vision Preview (Preview)',
-    key: 'llama-3.2-11b-vision-preview',
-  },
-  {
-    displayName: 'Llama 3.2 90B Vision Preview (Preview)',
-    key: 'llama-3.2-90b-vision-preview',
-  },
-  /* {
-    displayName: 'Llama 4 Maverick 17B 128E Instruct (Preview)',
-    key: 'meta-llama/llama-4-maverick-17b-128e-instruct',
-  }, */
-  {
-    displayName: 'Llama 4 Scout 17B 16E Instruct (Preview)',
-    key: 'meta-llama/llama-4-scout-17b-16e-instruct',
-  },
-];
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 
 export const loadGroqChatModels = async () => {
   const groqApiKey = getGroqApiKey();
 
   if (!groqApiKey) return {};
 
   try {
+    const res = await fetch('https://api.groq.com/openai/v1/models', {
+      method: 'GET',
+      headers: {
+        Authorization: `bearer ${groqApiKey}`,
+        'Content-Type': 'application/json',
+      },
+    });
+
+    const groqChatModels = (await res.json()).data;
     const chatModels: Record<string, ChatModel> = {};
 
-    groqChatModels.forEach((model) => {
-      chatModels[model.key] = {
-        displayName: model.displayName,
+    groqChatModels.forEach((model: any) => {
+      chatModels[model.id] = {
+        displayName: model.id,
         model: new ChatOpenAI({
           openAIApiKey: groqApiKey,
-          modelName: model.key,
+          modelName: model.id,
           temperature: 0.7,
           configuration: {
             baseURL: 'https://api.groq.com/openai/v1',
```
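The loader now replaces the hand-maintained model list with a call to Groq's OpenAI-compatible `GET /openai/v1/models` endpoint and keys each entry by the returned `id`. Below is a standalone sketch of just that fetch-and-index step, outside the LangChain wiring shown in the diff; the `GroqModelEntry` type and the `listGroqModels` helper are names made up for this sketch, not part of the repo.

```ts
// Illustrative helper (not in the repo): fetch Groq's model catalogue
// from the OpenAI-compatible endpoint and index it by model id.
interface GroqModelEntry {
  id: string;
  object: string;
  owned_by?: string;
}

const listGroqModels = async (
  apiKey: string,
): Promise<Record<string, GroqModelEntry>> => {
  const res = await fetch('https://api.groq.com/openai/v1/models', {
    method: 'GET',
    headers: {
      Authorization: `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
    },
  });

  if (!res.ok) {
    throw new Error(`Groq /models request failed: ${res.status}`);
  }

  // The OpenAI-compatible response wraps the model list in a `data` array.
  const { data } = (await res.json()) as { data: GroqModelEntry[] };

  const byId: Record<string, GroqModelEntry> = {};
  data.forEach((model) => {
    byId[model.id] = model;
  });

  return byId;
};
```

One trade-off visible in the diff: display names now fall back to the raw model id (`displayName: model.id`), since the catalogue endpoint does not return the curated labels that the old hardcoded list carried.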