chore: update dependency

fix: fix typescript errors
This commit is contained in:
Justin Luoma 2024-05-24 15:39:00 -04:00
parent 62910b5879
commit d788ca8eba
6 changed files with 292 additions and 296 deletions

View file

@@ -1,4 +1,4 @@
import { RunnableSequence, RunnableMap } from '@langchain/core/runnables'; import {RunnableMap, RunnableSequence} from '@langchain/core/runnables';
import ListLineOutputParser from '../lib/outputParsers/listLineOutputParser'; import ListLineOutputParser from '../lib/outputParsers/listLineOutputParser';
import {PromptTemplate} from '@langchain/core/prompts'; import {PromptTemplate} from '@langchain/core/prompts';
import formatChatHistoryAsString from '../utils/formatHistory'; import formatChatHistoryAsString from '../utils/formatHistory';
@@ -45,10 +45,10 @@ const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
const generateSuggestions = ( const generateSuggestions = (
input: SuggestionGeneratorInput, input: SuggestionGeneratorInput,
llm: BaseChatModel, llm: ChatOpenAI,
) => { ) => {
(llm as ChatOpenAI).temperature = 0; llm.temperature = 0;
const suggestionGeneratorChain = createSuggestionGeneratorChain(llm); const suggestionGeneratorChain = createSuggestionGeneratorChain(llm as unknown as BaseChatModel);
return suggestionGeneratorChain.invoke(input); return suggestionGeneratorChain.invoke(input);
}; };

View file

@@ -2,7 +2,7 @@ import express from 'express';
import generateSuggestions from '../agents/suggestionGeneratorAgent'; import generateSuggestions from '../agents/suggestionGeneratorAgent';
import {BaseChatModel} from '@langchain/core/language_models/chat_models'; import {BaseChatModel} from '@langchain/core/language_models/chat_models';
import {getAvailableChatModelProviders} from '../lib/providers'; import {getAvailableChatModelProviders} from '../lib/providers';
import { HumanMessage, AIMessage } from '@langchain/core/messages'; import {AIMessage, HumanMessage} from '@langchain/core/messages';
import logger from '../utils/logger'; import logger from '../utils/logger';
const router = express.Router(); const router = express.Router();
@@ -34,6 +34,7 @@ router.post('/', async (req, res) => {
return; return;
} }
// @ts-ignore
const suggestions = await generateSuggestions({ chat_history }, llm); const suggestions = await generateSuggestions({ chat_history }, llm);
res.status(200).json({ suggestions: suggestions }); res.status(200).json({ suggestions: suggestions });

View file

@@ -1,9 +1,6 @@
import {WebSocket} from 'ws'; import {WebSocket} from 'ws';
import {handleMessage} from './messageHandler'; import {handleMessage} from './messageHandler';
import { import {getAvailableChatModelProviders, getAvailableEmbeddingModelProviders,} from '../lib/providers';
getAvailableEmbeddingModelProviders,
getAvailableChatModelProviders,
} from '../lib/providers';
import {BaseChatModel} from '@langchain/core/language_models/chat_models'; import {BaseChatModel} from '@langchain/core/language_models/chat_models';
import type {Embeddings} from '@langchain/core/embeddings'; import type {Embeddings} from '@langchain/core/embeddings';
import type {IncomingMessage} from 'http'; import type {IncomingMessage} from 'http';
@@ -49,7 +46,7 @@ export const handleConnection = async (
| BaseChatModel | BaseChatModel
| undefined; | undefined;
} else if (chatModelProvider == 'custom_openai') { } else if (chatModelProvider == 'custom_openai') {
llm = new ChatOpenAI({ (llm as unknown as ChatOpenAI) = new ChatOpenAI({
modelName: chatModel, modelName: chatModel,
openAIApiKey: searchParams.get('openAIApiKey'), openAIApiKey: searchParams.get('openAIApiKey'),
temperature: 0.7, temperature: 0.7,

View file

@@ -1,12 +1,4 @@
import { import {BadgePercent, ChevronDown, CopyPlus, Globe, Pencil, ScanEye, SwatchBook,} from 'lucide-react';
BadgePercent,
ChevronDown,
CopyPlus,
Globe,
Pencil,
ScanEye,
SwatchBook,
} from 'lucide-react';
import {cn} from '@/lib/utils'; import {cn} from '@/lib/utils';
import {Popover, Switch, Transition} from '@headlessui/react'; import {Popover, Switch, Transition} from '@headlessui/react';
import {SiReddit, SiYoutube} from '@icons-pack/react-simple-icons'; import {SiReddit, SiYoutube} from '@icons-pack/react-simple-icons';
@@ -55,8 +47,8 @@ const focusModes = [
icon: ( icon: (
<SiYoutube <SiYoutube
className="h-5 w-auto mr-0.5" className="h-5 w-auto mr-0.5"
onPointerEnterCapture={undefined} onPointerEnter={undefined}
onPointerLeaveCapture={undefined} onPointerLeave={undefined}
/> />
), ),
}, },
@@ -67,8 +59,8 @@ const focusModes = [
icon: ( icon: (
<SiReddit <SiReddit
className="h-5 w-auto mr-0.5" className="h-5 w-auto mr-0.5"
onPointerEnterCapture={undefined} onPointerEnter={undefined}
onPointerLeaveCapture={undefined} onPointerLeave={undefined}
/> />
), ),
}, },

View file

@@ -12,7 +12,7 @@
}, },
"dependencies": { "dependencies": {
"@headlessui/react": "^1.7.18", "@headlessui/react": "^1.7.18",
"@icons-pack/react-simple-icons": "^9.4.0", "@icons-pack/react-simple-icons": "^9.5.0",
"@langchain/openai": "^0.0.25", "@langchain/openai": "^0.0.25",
"@tailwindcss/typography": "^0.5.12", "@tailwindcss/typography": "^0.5.12",
"clsx": "^2.1.0", "clsx": "^2.1.0",

File diff suppressed because it is too large Load diff