chore: Update eslint and prettier configurations

Jin Yucong 2024-07-05 14:19:53 +08:00
parent c63c9b5c8a
commit 5b1aaee605
24 changed files with 826 additions and 38 deletions

@@ -66,7 +66,7 @@ const basicAcademicSearchResponsePrompt = `
 const strParser = new StringOutputParser();

 const handleStream = async (
-  stream: AsyncGenerator<StreamEvent, any, unknown>,
+  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
   emitter: eventEmitter,
 ) => {
   for await (const event of stream) {
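
Note: across the agent files, the second type parameter of AsyncGenerator (the type of the value the generator produces when it finishes) is narrowed from any to unknown. A minimal, self-contained sketch of what that buys, using a stand-in StreamEvent type rather than the real LangChain one:

// Stand-in for the StreamEvent type the agents actually use.
type StreamEvent = { event: string; data?: unknown };

// TReturn = any: the value produced when the generator finishes can be used
// without any checking, so mistakes compile silently.
async function drainLoose(stream: AsyncGenerator<StreamEvent, any, unknown>) {
  const last = await stream.next();
  if (last.done) {
    return last.value.someField; // compiles even if someField never exists
  }
}

// TReturn = unknown: the finishing value has to be narrowed before use.
async function drainStrict(
  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
) {
  const last = await stream.next();
  if (last.done && typeof last.value === 'string') {
    return last.value.trim(); // safe after the typeof guard
  }
}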

@@ -66,7 +66,7 @@ const basicRedditSearchResponsePrompt = `
 const strParser = new StringOutputParser();

 const handleStream = async (
-  stream: AsyncGenerator<StreamEvent, any, unknown>,
+  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
   emitter: eventEmitter,
 ) => {
   for await (const event of stream) {

@@ -66,7 +66,7 @@ const basicWebSearchResponsePrompt = `
 const strParser = new StringOutputParser();

 const handleStream = async (
-  stream: AsyncGenerator<StreamEvent, any, unknown>,
+  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
   emitter: eventEmitter,
 ) => {
   for await (const event of stream) {

@@ -65,7 +65,7 @@ const basicWolframAlphaSearchResponsePrompt = `
 const strParser = new StringOutputParser();

 const handleStream = async (
-  stream: AsyncGenerator<StreamEvent, any, unknown>,
+  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
   emitter: eventEmitter,
 ) => {
   for await (const event of stream) {
@@ -153,7 +153,7 @@ const createBasicWolframAlphaSearchAnsweringChain = (llm: BaseChatModel) => {
         chat_history: formatChatHistoryAsString(input.chat_history),
       }),
       basicWolframAlphaSearchRetrieverChain
-        .pipe(({ query, docs }) => {
+        .pipe(({ docs }) => {
           return docs;
         })
         .withConfig({
@@ -210,7 +210,8 @@ const handleWolframAlphaSearch = (
   message: string,
   history: BaseMessage[],
   llm: BaseChatModel,
-  embeddings: Embeddings,
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  _embeddings: Embeddings,
 ) => {
   const emitter = basicWolframAlphaSearch(message, history, llm);
   return emitter;
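
Note: handleWolframAlphaSearch (and handleWritingAssistant below) keep the embeddings parameter so all handlers share one signature, but never read it; the underscore rename plus the targeted disable comment silences @typescript-eslint/no-unused-vars at just that spot. A hedged sketch of the pattern with illustrative names, not the project's:

// Illustrative only: a shared handler signature where some handlers
// do not use every argument they receive.
type SearchHandler = (
  message: string,
  llm: unknown,
  embeddings: unknown,
) => string;

const handleSimpleSearch: SearchHandler = (
  message,
  llm,
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  _embeddings,
) => {
  return `handled "${message}" with ${String(llm)}`;
};

An alternative is configuring the rule once with argsIgnorePattern: '^_', which exempts underscore-prefixed parameters without per-line disable comments.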

@@ -19,7 +19,7 @@ Since you are a writing assistant, you would not perform web searches. If you th
 const strParser = new StringOutputParser();

 const handleStream = async (
-  stream: AsyncGenerator<StreamEvent, any, unknown>,
+  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
   emitter: eventEmitter,
 ) => {
   for await (const event of stream) {
@@ -59,7 +59,8 @@ const handleWritingAssistant = (
   query: string,
   history: BaseMessage[],
   llm: BaseChatModel,
-  embeddings: Embeddings,
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  _embeddings: Embeddings,
 ) => {
   const emitter = new eventEmitter();

@@ -66,7 +66,7 @@ const basicYoutubeSearchResponsePrompt = `
 const strParser = new StringOutputParser();

 const handleStream = async (
-  stream: AsyncGenerator<StreamEvent, any, unknown>,
+  stream: AsyncGenerator<StreamEvent, unknown, unknown>,
   emitter: eventEmitter,
 ) => {
   for await (const event of stream) {

@@ -26,7 +26,7 @@ type RecursivePartial<T> = {
 const loadConfig = () =>
   toml.parse(
     fs.readFileSync(path.join(__dirname, `../${configFileName}`), 'utf-8'),
-  ) as any as Config;
+  ) as unknown as Config;

 export const getPort = () => loadConfig().GENERAL.PORT;
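
Note: as any as Config and as unknown as Config behave identically at runtime; both are double assertions that bypass structural checking. Routing through unknown keeps @typescript-eslint/no-explicit-any satisfied while signalling the cast is deliberate. A small sketch with a made-up config shape (the real Config type lives in config.ts):

// Made-up shapes for illustration only.
interface Config {
  GENERAL: { PORT: number };
}

// Parsers like toml.parse return a loosely typed table. Asserting through
// `unknown` instead of `any` avoids the explicit-any lint error while
// keeping the conversion an explicit, two-step assertion.
const parsed: Record<string, unknown> = { GENERAL: { PORT: 3001 } };
const config = parsed as unknown as Config;

console.log(config.GENERAL.PORT); // 3001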

@@ -28,6 +28,7 @@ export class HuggingFaceTransformersEmbeddings
   timeout?: number;

+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   private pipelinePromise: Promise<any>;

   constructor(fields?: Partial<HuggingFaceTransformersEmbeddingsParams>) {

@@ -102,6 +102,7 @@ export const getAvailableChatModelProviders = async () => {
         },
       });

+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
       const { models: ollamaModels } = (await response.json()) as any;

       models['ollama'] = ollamaModels.reduce((acc, model) => {
@@ -153,6 +154,7 @@ export const getAvailableEmbeddingModelProviders = async () => {
         },
       });

+      // eslint-disable-next-line @typescript-eslint/no-explicit-any
       const { models: ollamaModels } = (await response.json()) as any;

       models['ollama'] = ollamaModels.reduce((acc, model) => {
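
Note: the response.json() result from the Ollama models request is still cast with as any, now fenced by a one-line disable. If the disable is to be dropped later, a narrow interface over only the fields the code reads would be enough. A hedged sketch; the endpoint path and the field name are assumptions based on Ollama's public API, not taken from this diff:

// Assumed shape: Ollama's GET /api/tags lists local models with a name field.
interface OllamaTagsResponse {
  models: { name: string }[];
}

async function listOllamaModels(ollamaEndpoint: string): Promise<string[]> {
  const response = await fetch(`${ollamaEndpoint}/api/tags`);
  // Typed cast instead of `as any`, so no eslint-disable is needed here.
  const { models } = (await response.json()) as OllamaTagsResponse;
  return models.map((model) => model.name);
}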

@@ -9,9 +9,10 @@ const router = express.Router();

 router.post('/', async (req, res) => {
   try {
-    let { query, chat_history, chat_model_provider, chat_model } = req.body;
+    const { query, chat_history: raw_chat_history, chat_model_provider, chat_model } = req.body;

-    chat_history = chat_history.map((msg: any) => {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const chat_history = raw_chat_history.map((msg: any) => {
       if (msg.role === 'user') {
         return new HumanMessage(msg.content);
       } else if (msg.role === 'assistant') {
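
Note: the route handlers stop reassigning the destructured chat_history and instead pull the raw body value out under a different name, building the converted history as a new const; the msg parameter keeps its any, fenced with a disable. A hedged sketch of the same conversion with the any removed entirely, assuming a wire format this diff does not show (the assistant branch presumably returns an AIMessage; that line sits outside the hunk):

import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages';

// Assumed wire format for a chat message in the request body.
interface RawChatMessage {
  role: 'user' | 'assistant';
  content: string;
}

// Converts the raw request history into LangChain messages without `any`,
// so no per-line eslint-disable is required.
const toLangChainHistory = (raw: RawChatMessage[]): BaseMessage[] =>
  raw.map((msg) =>
    msg.role === 'user'
      ? new HumanMessage(msg.content)
      : new AIMessage(msg.content),
  );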

@@ -9,9 +9,10 @@ const router = express.Router();

 router.post('/', async (req, res) => {
   try {
-    let { chat_history, chat_model, chat_model_provider } = req.body;
+    const { chat_history: raw_chat_history, chat_model, chat_model_provider } = req.body;

-    chat_history = chat_history.map((msg: any) => {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const chat_history = raw_chat_history.map((msg: any) => {
       if (msg.role === 'user') {
         return new HumanMessage(msg.content);
       } else if (msg.role === 'assistant') {

@@ -9,9 +9,10 @@ const router = express.Router();

 router.post('/', async (req, res) => {
   try {
-    let { query, chat_history, chat_model_provider, chat_model } = req.body;
+    const { query, chat_history: raw_chat_history, chat_model_provider, chat_model } = req.body;

-    chat_history = chat_history.map((msg: any) => {
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    const chat_history = raw_chat_history.map((msg: any) => {
       if (msg.role === 'user') {
         return new HumanMessage(msg.content);
       } else if (msg.role === 'assistant') {