diff --git a/.assets/manifest.json b/.assets/manifest.json deleted file mode 100644 index e69de29..0000000 diff --git a/.github/workflows/docker-build.yaml b/.github/workflows/docker-build.yaml index 29f7987..ea956ea 100644 --- a/.github/workflows/docker-build.yaml +++ b/.github/workflows/docker-build.yaml @@ -114,11 +114,6 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Extract version from release tag - if: github.event_name == 'release' - id: version - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV - - name: Create and push multi-arch manifest for main if: github.ref == 'refs/heads/master' && github.event_name == 'push' run: | diff --git a/.gitignore b/.gitignore index 9fb5e4c..c95173d 100644 --- a/.gitignore +++ b/.gitignore @@ -37,5 +37,3 @@ Thumbs.db # Db db.sqlite /searxng - -certificates \ No newline at end of file diff --git a/README.md b/README.md index 5eb0713..6540c73 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@
-[![Discord](https://dcbadge.limes.pink/api/server/26aArMy8tT?style=flat)](https://discord.gg/26aArMy8tT) +[![Discord](https://dcbadge.vercel.app/api/server/26aArMy8tT?style=flat&compact=true)](https://discord.gg/26aArMy8tT) ![preview](.assets/perplexica-screenshot.png?) @@ -90,9 +90,6 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**. - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**. - - `Gemini`: Your Gemini API key. **You only need to fill this if you wish to use Google's models**. - - `DEEPSEEK`: Your Deepseek API key. **Only needed if you want Deepseek models.** - - `AIMLAPI`: Your AI/ML API key. **Only needed if you want to use AI/ML API models and embeddings.** **Note**: You can change these after starting Perplexica from the settings dialog. @@ -114,7 +111,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. 2. Clone the repository and rename the `sample.config.toml` file to `config.toml` in the root directory. Ensure you complete all required fields in this file. 3. After populating the configuration run `npm i`. 4. Install the dependencies and then execute `npm run build`. -5. Finally, start the app by running `npm run start` +5. Finally, start the app by running `npm run start` **Note**: Using Docker is recommended as it simplifies the setup process, especially for managing environment variables and dependencies. @@ -135,7 +132,7 @@ If you're encountering an Ollama connection error, it is likely due to the backe 3. 
**Linux Users - Expose Ollama to Network:** - - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0:11434"`. (Change the port number if you are using a different one.) Then reload the systemd manager configuration with `systemctl daemon-reload`, and restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux) + - Inside `/etc/systemd/system/ollama.service`, you need to add `Environment="OLLAMA_HOST=0.0.0.0"`. Then restart Ollama by `systemctl restart ollama`. For more information see [Ollama docs](https://github.com/ollama/ollama/blob/main/docs/faq.md#setting-environment-variables-on-linux) - Ensure that the port (default is 11434) is not blocked by your firewall. @@ -156,13 +153,12 @@ For more details, check out the full documentation [here](https://github.com/Itz ## Expose Perplexica to network -Perplexica runs on Next.js and handles all API requests. It works right away on the same network and stays accessible even with port forwarding. +You can access Perplexica over your home network by following our networking guide [here](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md). 
## One-Click Deployment [![Deploy to Sealos](https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg)](https://usw.sealos.io/?openapp=system-template%3FtemplateName%3Dperplexica) [![Deploy to RepoCloud](https://d16t0pc4846x52.cloudfront.net/deploylobe.svg)](https://repocloud.io/details/?app_id=267) -[![Run on ClawCloud](https://raw.githubusercontent.com/ClawCloud/Run-Template/refs/heads/main/Run-on-ClawCloud.svg)](https://template.run.claw.cloud/?referralCode=U11MRQ8U9RM4&openapp=system-fastdeploy%3FtemplateName%3Dperplexica) ## Upcoming Features diff --git a/app.dockerfile b/app.dockerfile index c3c0fd0..57a270e 100644 --- a/app.dockerfile +++ b/app.dockerfile @@ -1,4 +1,4 @@ -FROM node:20.18.0-slim AS builder +FROM node:20.18.0-alpine AS builder WORKDIR /home/perplexica @@ -12,10 +12,7 @@ COPY public ./public RUN mkdir -p /home/perplexica/data RUN yarn build -RUN yarn add --dev @vercel/ncc -RUN yarn ncc build ./src/lib/db/migrate.ts -o migrator - -FROM node:20.18.0-slim +FROM node:20.18.0-alpine WORKDIR /home/perplexica @@ -24,12 +21,7 @@ COPY --from=builder /home/perplexica/.next/static ./public/_next/static COPY --from=builder /home/perplexica/.next/standalone ./ COPY --from=builder /home/perplexica/data ./data -COPY drizzle ./drizzle -COPY --from=builder /home/perplexica/migrator/build ./build -COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js RUN mkdir /home/perplexica/uploads -COPY entrypoint.sh ./entrypoint.sh -RUN chmod +x ./entrypoint.sh -CMD ["./entrypoint.sh"] \ No newline at end of file +CMD ["node", "server.js"] \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index b32e0a9..b702b4e 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -16,7 +16,6 @@ services: dockerfile: app.dockerfile environment: - SEARXNG_API_URL=http://searxng:8080 - - DATA_DIR=/home/perplexica ports: - 3000:3000 networks: diff --git a/docs/API/SEARCH.md b/docs/API/SEARCH.md index 
b67b62b..3007901 100644 --- a/docs/API/SEARCH.md +++ b/docs/API/SEARCH.md @@ -32,9 +32,7 @@ The API accepts a JSON object in the request body, where you define the focus mo "history": [ ["human", "Hi, how are you?"], ["assistant", "I am doing well, how can I help you today?"] - ], - "systemInstructions": "Focus on providing technical details about Perplexica's architecture.", - "stream": false + ] } ``` @@ -64,8 +62,6 @@ The API accepts a JSON object in the request body, where you define the focus mo - **`query`** (string, required): The search query or question. -- **`systemInstructions`** (string, optional): Custom instructions provided by the user to guide the AI's response. These instructions are treated as user preferences and have lower priority than the system's core instructions. For example, you can specify a particular writing style, format, or focus area. - - **`history`** (array, optional): An array of message pairs representing the conversation history. Each pair consists of a role (either 'human' or 'assistant') and the message content. This allows the system to use the context of the conversation to refine results. Example: ```json @@ -75,13 +71,11 @@ The API accepts a JSON object in the request body, where you define the focus mo ] ``` -- **`stream`** (boolean, optional): When set to `true`, enables streaming responses. Default is `false`. - ### Response The response from the API includes both the final message and the sources used to generate that message. -#### Standard Response (stream: false) +#### Example Response ```json { @@ -106,28 +100,6 @@ The response from the API includes both the final message and the sources used t } ``` -#### Streaming Response (stream: true) - -When streaming is enabled, the API returns a stream of newline-delimited JSON objects. Each line contains a complete, valid JSON object. The response has Content-Type: application/json. 
- -Example of streamed response objects: - -``` -{"type":"init","data":"Stream connected"} -{"type":"sources","data":[{"pageContent":"...","metadata":{"title":"...","url":"..."}},...]} -{"type":"response","data":"Perplexica is an "} -{"type":"response","data":"innovative, open-source "} -{"type":"response","data":"AI-powered search engine..."} -{"type":"done"} -``` - -Clients should process each line as a separate JSON object. The different message types include: - -- **`init`**: Initial connection message -- **`sources`**: All sources used for the response -- **`response`**: Chunks of the generated answer text -- **`done`**: Indicates the stream is complete - ### Fields in the Response - **`message`** (string): The search result, generated based on the query and focus mode. diff --git a/docs/installation/UPDATING.md b/docs/installation/UPDATING.md index 66edf5c..972142f 100644 --- a/docs/installation/UPDATING.md +++ b/docs/installation/UPDATING.md @@ -41,6 +41,6 @@ To update Perplexica to the latest version, follow these steps: 3. Check for changes in the configuration files. If the `sample.config.toml` file contains new fields, delete your existing `config.toml` file, rename `sample.config.toml` to `config.toml`, and update the configuration accordingly. 4. After populating the configuration run `npm i`. 5. Install the dependencies and then execute `npm run build`. -6. Finally, start the app by running `npm run start` +6. 
Finally, start the app by running `npm rum start` --- diff --git a/drizzle.config.ts b/drizzle.config.ts index a029112..58de9e0 100644 --- a/drizzle.config.ts +++ b/drizzle.config.ts @@ -1,11 +1,10 @@ import { defineConfig } from 'drizzle-kit'; -import path from 'path'; export default defineConfig({ dialect: 'sqlite', schema: './src/lib/db/schema.ts', out: './drizzle', dbCredentials: { - url: path.join(process.cwd(), 'data', 'db.sqlite'), + url: './data/db.sqlite', }, }); diff --git a/drizzle/0000_fuzzy_randall.sql b/drizzle/0000_fuzzy_randall.sql deleted file mode 100644 index 0a2ff07..0000000 --- a/drizzle/0000_fuzzy_randall.sql +++ /dev/null @@ -1,16 +0,0 @@ -CREATE TABLE IF NOT EXISTS `chats` ( - `id` text PRIMARY KEY NOT NULL, - `title` text NOT NULL, - `createdAt` text NOT NULL, - `focusMode` text NOT NULL, - `files` text DEFAULT '[]' -); ---> statement-breakpoint -CREATE TABLE IF NOT EXISTS `messages` ( - `id` integer PRIMARY KEY NOT NULL, - `content` text NOT NULL, - `chatId` text NOT NULL, - `messageId` text NOT NULL, - `type` text, - `metadata` text -); diff --git a/drizzle/meta/0000_snapshot.json b/drizzle/meta/0000_snapshot.json deleted file mode 100644 index 850bcd3..0000000 --- a/drizzle/meta/0000_snapshot.json +++ /dev/null @@ -1,116 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "ef3a044b-0f34-40b5-babb-2bb3a909ba27", - "prevId": "00000000-0000-0000-0000-000000000000", - "tables": { - "chats": { - "name": "chats", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "title": { - "name": "title", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "createdAt": { - "name": "createdAt", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "focusMode": { - "name": "focusMode", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "files": { - 
"name": "files", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false, - "default": "'[]'" - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "messages": { - "name": "messages", - "columns": { - "id": { - "name": "id", - "type": "integer", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "content": { - "name": "content", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "chatId": { - "name": "chatId", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "messageId": { - "name": "messageId", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "type": { - "name": "type", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "metadata": { - "name": "metadata", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - } - }, - "views": {}, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} diff --git a/drizzle/meta/_journal.json b/drizzle/meta/_journal.json deleted file mode 100644 index 5db59d1..0000000 --- a/drizzle/meta/_journal.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": "7", - "dialect": "sqlite", - "entries": [ - { - "idx": 0, - "version": "6", - "when": 1748405503809, - "tag": "0000_fuzzy_randall", - "breakpoints": true - } - ] -} diff --git a/entrypoint.sh b/entrypoint.sh deleted file mode 100644 index 9f9448a..0000000 --- a/entrypoint.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -set -e - -node migrate.js - -exec node server.js \ No newline at end of file diff --git a/package.json b/package.json index 5715c2a..e2cf944 100644 --- 
a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "perplexica-frontend", - "version": "1.11.0-rc2", + "version": "1.10.0", "license": "MIT", "author": "ItzCrazyKns", "scripts": { @@ -15,13 +15,9 @@ "@headlessui/react": "^2.2.0", "@iarna/toml": "^2.2.5", "@icons-pack/react-simple-icons": "^12.3.0", - "@langchain/anthropic": "^0.3.24", - "@langchain/community": "^0.3.49", - "@langchain/core": "^0.3.66", - "@langchain/google-genai": "^0.2.15", - "@langchain/groq": "^0.2.3", - "@langchain/ollama": "^0.2.3", - "@langchain/openai": "^0.6.2", + "@langchain/community": "^0.3.36", + "@langchain/core": "^0.3.42", + "@langchain/openai": "^0.0.25", "@langchain/textsplitters": "^0.1.0", "@tailwindcss/typography": "^0.5.12", "@xenova/transformers": "^2.17.2", @@ -32,10 +28,8 @@ "compute-dot": "^1.1.0", "drizzle-orm": "^0.40.1", "html-to-text": "^9.0.5", - "jspdf": "^3.0.1", - "langchain": "^0.3.30", + "langchain": "^0.1.30", "lucide-react": "^0.363.0", - "mammoth": "^1.9.1", "markdown-to-jsx": "^7.7.2", "next": "^15.2.2", "next-themes": "^0.3.0", @@ -53,7 +47,6 @@ "devDependencies": { "@types/better-sqlite3": "^7.6.12", "@types/html-to-text": "^9.0.4", - "@types/jspdf": "^2.0.0", "@types/node": "^20", "@types/pdf-parse": "^1.1.4", "@types/react": "^18", diff --git a/public/icon-100.png b/public/icon-100.png deleted file mode 100644 index 98fa242..0000000 Binary files a/public/icon-100.png and /dev/null differ diff --git a/public/icon-50.png b/public/icon-50.png deleted file mode 100644 index 9bb7a0e..0000000 Binary files a/public/icon-50.png and /dev/null differ diff --git a/public/icon.png b/public/icon.png deleted file mode 100644 index f6fe3c7..0000000 Binary files a/public/icon.png and /dev/null differ diff --git a/public/screenshots/p1.png b/public/screenshots/p1.png deleted file mode 100644 index 02f01e5..0000000 Binary files a/public/screenshots/p1.png and /dev/null differ diff --git a/public/screenshots/p1_small.png b/public/screenshots/p1_small.png deleted 
file mode 100644 index 13d9a42..0000000 Binary files a/public/screenshots/p1_small.png and /dev/null differ diff --git a/public/screenshots/p2.png b/public/screenshots/p2.png deleted file mode 100644 index 1171675..0000000 Binary files a/public/screenshots/p2.png and /dev/null differ diff --git a/public/screenshots/p2_small.png b/public/screenshots/p2_small.png deleted file mode 100644 index bd8d673..0000000 Binary files a/public/screenshots/p2_small.png and /dev/null differ diff --git a/public/weather-ico/clear-day.svg b/public/weather-ico/clear-day.svg deleted file mode 100644 index d97d28b..0000000 --- a/public/weather-ico/clear-day.svg +++ /dev/null @@ -1,131 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/clear-night.svg b/public/weather-ico/clear-night.svg deleted file mode 100644 index 005ac63..0000000 --- a/public/weather-ico/clear-night.svg +++ /dev/null @@ -1,159 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/cloudy-1-day.svg b/public/weather-ico/cloudy-1-day.svg deleted file mode 100644 index 823fea1..0000000 --- a/public/weather-ico/cloudy-1-day.svg +++ /dev/null @@ -1,178 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/cloudy-1-night.svg b/public/weather-ico/cloudy-1-night.svg deleted file mode 100644 index 3fe1541..0000000 --- a/public/weather-ico/cloudy-1-night.svg +++ /dev/null @@ -1,206 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/fog-day.svg b/public/weather-ico/fog-day.svg deleted file mode 100644 index ed834cf..0000000 --- a/public/weather-ico/fog-day.svg +++ /dev/null @@ -1,244 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - F - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/fog-night.svg b/public/weather-ico/fog-night.svg deleted file mode 100644 index d59f98f..0000000 --- a/public/weather-ico/fog-night.svg +++ /dev/null @@ -1,309 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/frost-day.svg b/public/weather-ico/frost-day.svg deleted file mode 100644 index 16d591c..0000000 --- a/public/weather-ico/frost-day.svg +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - F - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/frost-night.svg b/public/weather-ico/frost-night.svg deleted file mode 100644 index ff2c8dc..0000000 --- a/public/weather-ico/frost-night.svg +++ /dev/null @@ -1,269 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/rain-and-sleet-mix.svg b/public/weather-ico/rain-and-sleet-mix.svg deleted file mode 100644 index 172010d..0000000 --- a/public/weather-ico/rain-and-sleet-mix.svg +++ /dev/null @@ -1,141 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/rainy-1-day.svg b/public/weather-ico/rainy-1-day.svg deleted file mode 100644 index 2faf06e..0000000 --- a/public/weather-ico/rainy-1-day.svg +++ /dev/null @@ -1,179 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/rainy-1-night.svg b/public/weather-ico/rainy-1-night.svg deleted file mode 100644 index ee8ffd8..0000000 --- a/public/weather-ico/rainy-1-night.svg +++ /dev/null @@ -1,243 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline 
at end of file diff --git a/public/weather-ico/rainy-2-day.svg b/public/weather-ico/rainy-2-day.svg deleted file mode 100644 index affdfff..0000000 --- a/public/weather-ico/rainy-2-day.svg +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/rainy-2-night.svg b/public/weather-ico/rainy-2-night.svg deleted file mode 100644 index 9c3ae20..0000000 --- a/public/weather-ico/rainy-2-night.svg +++ /dev/null @@ -1,256 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/rainy-3-day.svg b/public/weather-ico/rainy-3-day.svg deleted file mode 100644 index b0b5754..0000000 --- a/public/weather-ico/rainy-3-day.svg +++ /dev/null @@ -1,206 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/rainy-3-night.svg b/public/weather-ico/rainy-3-night.svg deleted file mode 100644 index 4078e7d..0000000 --- a/public/weather-ico/rainy-3-night.svg +++ /dev/null @@ -1,270 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/scattered-thunderstorms-day.svg b/public/weather-ico/scattered-thunderstorms-day.svg deleted file mode 100644 index 0cfbccc..0000000 --- a/public/weather-ico/scattered-thunderstorms-day.svg +++ /dev/null @@ -1,374 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/scattered-thunderstorms-night.svg b/public/weather-ico/scattered-thunderstorms-night.svg deleted file mode 100644 index 72cf7a6..0000000 --- a/public/weather-ico/scattered-thunderstorms-night.svg +++ /dev/null @@ -1,283 +0,0 @@ - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/severe-thunderstorm.svg b/public/weather-ico/severe-thunderstorm.svg deleted file mode 100644 index 223198b..0000000 --- a/public/weather-ico/severe-thunderstorm.svg +++ /dev/null @@ -1,307 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/snowy-1-day.svg b/public/weather-ico/snowy-1-day.svg deleted file mode 100644 index fb73943..0000000 --- a/public/weather-ico/snowy-1-day.svg +++ /dev/null @@ -1,241 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/snowy-1-night.svg b/public/weather-ico/snowy-1-night.svg deleted file mode 100644 index 039ea2e..0000000 --- a/public/weather-ico/snowy-1-night.svg +++ /dev/null @@ -1,269 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/snowy-2-day.svg b/public/weather-ico/snowy-2-day.svg deleted file mode 100644 index 323a616..0000000 --- a/public/weather-ico/snowy-2-day.svg +++ /dev/null @@ -1,273 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/snowy-2-night.svg b/public/weather-ico/snowy-2-night.svg deleted file mode 100644 index 10dcbfa..0000000 --- a/public/weather-ico/snowy-2-night.svg +++ /dev/null @@ -1,301 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/snowy-3-day.svg b/public/weather-ico/snowy-3-day.svg deleted file mode 100644 index 846c17a..0000000 --- a/public/weather-ico/snowy-3-day.svg +++ /dev/null @@ -1,334 +0,0 @@ - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/public/weather-ico/snowy-3-night.svg b/public/weather-ico/snowy-3-night.svg deleted file mode 100644 index b3c8c24..0000000 --- a/public/weather-ico/snowy-3-night.svg +++ /dev/null @@ -1,361 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/sample.config.toml b/sample.config.toml index ba3e98e..691b964 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -22,14 +22,5 @@ MODEL_NAME = "" [MODELS.OLLAMA] API_URL = "" # Ollama API URL - http://host.docker.internal:11434 -[MODELS.DEEPSEEK] -API_KEY = "" - -[MODELS.AIMLAPI] -API_KEY = "" # Required to use AI/ML API chat and embedding models - -[MODELS.LM_STUDIO] -API_URL = "" # LM Studio API URL - http://host.docker.internal:1234 - [API_ENDPOINTS] -SEARXNG = "" # SearxNG API URL - http://localhost:32768 +SEARXNG = "" # SearxNG API URL - http://localhost:32768 \ No newline at end of file diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts index ba88da6..d9f9c6b 100644 --- a/src/app/api/chat/route.ts +++ b/src/app/api/chat/route.ts @@ -1,7 +1,11 @@ +import prompts from '@/lib/prompts'; +import MetaSearchAgent from '@/lib/search/metaSearchAgent'; import crypto from 'crypto'; import { AIMessage, BaseMessage, HumanMessage } from '@langchain/core/messages'; import { EventEmitter } from 'stream'; import { + chatModelProviders, + embeddingModelProviders, getAvailableChatModelProviders, getAvailableEmbeddingModelProviders, } from '@/lib/providers'; @@ -45,7 +49,6 @@ type Body = { files: Array; chatModel: ChatModel; embeddingModel: EmbeddingModel; - systemInstructions: string; }; const handleEmitterEvents = async ( @@ -134,8 +137,6 @@ const handleHistorySave = async ( where: eq(chats.id, message.chatId), }); - const fileData = 
files.map(getFileDetails); - if (!chat) { await db .insert(chats) @@ -144,15 +145,9 @@ const handleHistorySave = async ( title: message.content, createdAt: new Date().toString(), focusMode: focusMode, - files: fileData, - }) - .execute(); - } else if (JSON.stringify(chat.files ?? []) != JSON.stringify(fileData)) { - db.update(chats) - .set({ files: files.map(getFileDetails), }) - .where(eq(chats.id, message.chatId)); + .execute(); } const messageExists = await db.query.messages.findFirst({ @@ -227,7 +222,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), + openAIApiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { @@ -283,7 +278,6 @@ export const POST = async (req: Request) => { embedding, body.optimizationMode, body.files, - body.systemInstructions, ); const responseStream = new TransformStream(); @@ -301,9 +295,9 @@ export const POST = async (req: Request) => { }, }); } catch (err) { - console.error('An error occurred while processing chat request:', err); + console.error('An error ocurred while processing chat request:', err); return Response.json( - { message: 'An error occurred while processing chat request' }, + { message: 'An error ocurred while processing chat request' }, { status: 500 }, ); } diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts index f117cce..46c71f5 100644 --- a/src/app/api/config/route.ts +++ b/src/app/api/config/route.ts @@ -7,11 +7,7 @@ import { getGroqApiKey, getOllamaApiEndpoint, getOpenaiApiKey, - getDeepseekApiKey, - getAimlApiKey, - getLMStudioApiEndpoint, updateConfig, - getOllamaApiKey, } from '@/lib/config'; import { getAvailableChatModelProviders, @@ -54,22 +50,18 @@ export const GET = async (req: Request) => { config['openaiApiKey'] = getOpenaiApiKey(); config['ollamaApiUrl'] = getOllamaApiEndpoint(); - config['ollamaApiKey'] = getOllamaApiKey(); 
- config['lmStudioApiUrl'] = getLMStudioApiEndpoint(); config['anthropicApiKey'] = getAnthropicApiKey(); config['groqApiKey'] = getGroqApiKey(); config['geminiApiKey'] = getGeminiApiKey(); - config['deepseekApiKey'] = getDeepseekApiKey(); - config['aimlApiKey'] = getAimlApiKey(); config['customOpenaiApiUrl'] = getCustomOpenaiApiUrl(); config['customOpenaiApiKey'] = getCustomOpenaiApiKey(); config['customOpenaiModelName'] = getCustomOpenaiModelName(); return Response.json({ ...config }, { status: 200 }); } catch (err) { - console.error('An error occurred while getting config:', err); + console.error('An error ocurred while getting config:', err); return Response.json( - { message: 'An error occurred while getting config' }, + { message: 'An error ocurred while getting config' }, { status: 500 }, ); } @@ -95,16 +87,6 @@ export const POST = async (req: Request) => { }, OLLAMA: { API_URL: config.ollamaApiUrl, - API_KEY: config.ollamaApiKey, - }, - DEEPSEEK: { - API_KEY: config.deepseekApiKey, - }, - AIMLAPI: { - API_KEY: config.aimlApiKey, - }, - LM_STUDIO: { - API_URL: config.lmStudioApiUrl, }, CUSTOM_OPENAI: { API_URL: config.customOpenaiApiUrl, @@ -118,9 +100,9 @@ export const POST = async (req: Request) => { return Response.json({ message: 'Config updated' }, { status: 200 }); } catch (err) { - console.error('An error occurred while updating config:', err); + console.error('An error ocurred while updating config:', err); return Response.json( - { message: 'An error occurred while updating config' }, + { message: 'An error ocurred while updating config' }, { status: 500 }, ); } diff --git a/src/app/api/discover/route.ts b/src/app/api/discover/route.ts index 415aee8..0c95498 100644 --- a/src/app/api/discover/route.ts +++ b/src/app/api/discover/route.ts @@ -1,80 +1,43 @@ import { searchSearxng } from '@/lib/searxng'; -const websitesForTopic = { - tech: { - query: ['technology news', 'latest tech', 'AI', 'science and innovation'], - links: ['techcrunch.com', 
'wired.com', 'theverge.com'], - }, - finance: { - query: ['finance news', 'economy', 'stock market', 'investing'], - links: ['bloomberg.com', 'cnbc.com', 'marketwatch.com'], - }, - art: { - query: ['art news', 'culture', 'modern art', 'cultural events'], - links: ['artnews.com', 'hyperallergic.com', 'theartnewspaper.com'], - }, - sports: { - query: ['sports news', 'latest sports', 'cricket football tennis'], - links: ['espn.com', 'bbc.com/sport', 'skysports.com'], - }, - entertainment: { - query: ['entertainment news', 'movies', 'TV shows', 'celebrities'], - links: ['hollywoodreporter.com', 'variety.com', 'deadline.com'], - }, -}; +const articleWebsites = [ + 'yahoo.com', + 'www.exchangewire.com', + 'businessinsider.com', + /* 'wired.com', + 'mashable.com', + 'theverge.com', + 'gizmodo.com', + 'cnet.com', + 'venturebeat.com', */ +]; -type Topic = keyof typeof websitesForTopic; +const topics = ['AI', 'tech']; /* TODO: Add UI to customize this */ export const GET = async (req: Request) => { try { - const params = new URL(req.url).searchParams; - - const mode: 'normal' | 'preview' = - (params.get('mode') as 'normal' | 'preview') || 'normal'; - const topic: Topic = (params.get('topic') as Topic) || 'tech'; - - const selectedTopic = websitesForTopic[topic]; - - let data = []; - - if (mode === 'normal') { - const seenUrls = new Set(); - - data = ( - await Promise.all( - selectedTopic.links.flatMap((link) => - selectedTopic.query.map(async (query) => { - return ( - await searchSearxng(`site:${link} ${query}`, { + const data = ( + await Promise.all([ + ...new Array(articleWebsites.length * topics.length) + .fill(0) + .map(async (_, i) => { + return ( + await searchSearxng( + `site:${articleWebsites[i % articleWebsites.length]} ${ + topics[i % topics.length] + }`, + { engines: ['bing news'], pageno: 1, - language: 'en', - }) - ).results; - }), - ), - ) - ) - .flat() - .filter((item) => { - const url = item.url?.toLowerCase().trim(); - if (seenUrls.has(url)) return false; - 
seenUrls.add(url); - return true; - }) - .sort(() => Math.random() - 0.5); - } else { - data = ( - await searchSearxng( - `site:${selectedTopic.links[Math.floor(Math.random() * selectedTopic.links.length)]} ${selectedTopic.query[Math.floor(Math.random() * selectedTopic.query.length)]}`, - { - engines: ['bing news'], - pageno: 1, - language: 'en', - }, - ) - ).results; - } + }, + ) + ).results; + }), + ]) + ) + .map((result) => result) + .flat() + .sort(() => Math.random() - 0.5); return Response.json( { @@ -85,7 +48,7 @@ export const GET = async (req: Request) => { }, ); } catch (err) { - console.error(`An error occurred in discover route: ${err}`); + console.error(`An error ocurred in discover route: ${err}`); return Response.json( { message: 'An error has occurred', diff --git a/src/app/api/images/route.ts b/src/app/api/images/route.ts index e02854d..f0a6773 100644 --- a/src/app/api/images/route.ts +++ b/src/app/api/images/route.ts @@ -49,7 +49,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), + openAIApiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { @@ -74,9 +74,9 @@ export const POST = async (req: Request) => { return Response.json({ images }, { status: 200 }); } catch (err) { - console.error(`An error occurred while searching images: ${err}`); + console.error(`An error ocurred while searching images: ${err}`); return Response.json( - { message: 'An error occurred while searching images' }, + { message: 'An error ocurred while searching images' }, { status: 500 }, ); } diff --git a/src/app/api/models/route.ts b/src/app/api/models/route.ts index 04a6949..a5e5b43 100644 --- a/src/app/api/models/route.ts +++ b/src/app/api/models/route.ts @@ -34,7 +34,7 @@ export const GET = async (req: Request) => { }, ); } catch (err) { - console.error('An error occurred while fetching models', err); + 
console.error('An error ocurred while fetching models', err); return Response.json( { message: 'An error has occurred.', diff --git a/src/app/api/search/route.ts b/src/app/api/search/route.ts index 5f752ec..b980623 100644 --- a/src/app/api/search/route.ts +++ b/src/app/api/search/route.ts @@ -33,8 +33,6 @@ interface ChatRequestBody { embeddingModel?: embeddingModel; query: string; history: Array<[string, string]>; - stream?: boolean; - systemInstructions?: string; } export const POST = async (req: Request) => { @@ -50,7 +48,6 @@ export const POST = async (req: Request) => { body.history = body.history || []; body.optimizationMode = body.optimizationMode || 'balanced'; - body.stream = body.stream || false; const history: BaseMessage[] = body.history.map((msg) => { return msg[0] === 'human' @@ -81,7 +78,8 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ modelName: body.chatModel?.name || getCustomOpenaiModelName(), - apiKey: body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(), + openAIApiKey: + body.chatModel?.customOpenAIKey || getCustomOpenaiApiKey(), temperature: 0.7, configuration: { baseURL: @@ -125,140 +123,42 @@ export const POST = async (req: Request) => { embeddings, body.optimizationMode, [], - body.systemInstructions || '', ); - if (!body.stream) { - return new Promise( - ( - resolve: (value: Response) => void, - reject: (value: Response) => void, - ) => { - let message = ''; - let sources: any[] = []; - - emitter.on('data', (data: string) => { - try { - const parsedData = JSON.parse(data); - if (parsedData.type === 'response') { - message += parsedData.data; - } else if (parsedData.type === 'sources') { - sources = parsedData.data; - } - } catch (error) { - reject( - Response.json( - { message: 'Error parsing data' }, - { status: 500 }, - ), - ); - } - }); - - emitter.on('end', () => { - resolve(Response.json({ message, sources }, { status: 200 })); - }); - - 
emitter.on('error', (error: any) => { - reject( - Response.json( - { message: 'Search error', error }, - { status: 500 }, - ), - ); - }); - }, - ); - } - - const encoder = new TextEncoder(); - - const abortController = new AbortController(); - const { signal } = abortController; - - const stream = new ReadableStream({ - start(controller) { + return new Promise( + ( + resolve: (value: Response) => void, + reject: (value: Response) => void, + ) => { + let message = ''; let sources: any[] = []; - controller.enqueue( - encoder.encode( - JSON.stringify({ - type: 'init', - data: 'Stream connected', - }) + '\n', - ), - ); - - signal.addEventListener('abort', () => { - emitter.removeAllListeners(); - - try { - controller.close(); - } catch (error) {} - }); - - emitter.on('data', (data: string) => { - if (signal.aborted) return; - + emitter.on('data', (data) => { try { const parsedData = JSON.parse(data); - if (parsedData.type === 'response') { - controller.enqueue( - encoder.encode( - JSON.stringify({ - type: 'response', - data: parsedData.data, - }) + '\n', - ), - ); + message += parsedData.data; } else if (parsedData.type === 'sources') { sources = parsedData.data; - controller.enqueue( - encoder.encode( - JSON.stringify({ - type: 'sources', - data: sources, - }) + '\n', - ), - ); } } catch (error) { - controller.error(error); + reject( + Response.json({ message: 'Error parsing data' }, { status: 500 }), + ); } }); emitter.on('end', () => { - if (signal.aborted) return; + resolve(Response.json({ message, sources }, { status: 200 })); + }); - controller.enqueue( - encoder.encode( - JSON.stringify({ - type: 'done', - }) + '\n', - ), + emitter.on('error', (error) => { + reject( + Response.json({ message: 'Search error', error }, { status: 500 }), ); - controller.close(); - }); - - emitter.on('error', (error: any) => { - if (signal.aborted) return; - - controller.error(error); }); }, - cancel() { - abortController.abort(); - }, - }); - - return new Response(stream, { - 
headers: { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache, no-transform', - Connection: 'keep-alive', - }, - }); + ); } catch (err: any) { console.error(`Error in getting search results: ${err.message}`); return Response.json( diff --git a/src/app/api/suggestions/route.ts b/src/app/api/suggestions/route.ts index 99179d2..4a931df 100644 --- a/src/app/api/suggestions/route.ts +++ b/src/app/api/suggestions/route.ts @@ -48,7 +48,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), + openAIApiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { @@ -72,9 +72,9 @@ export const POST = async (req: Request) => { return Response.json({ suggestions }, { status: 200 }); } catch (err) { - console.error(`An error occurred while generating suggestions: ${err}`); + console.error(`An error ocurred while generating suggestions: ${err}`); return Response.json( - { message: 'An error occurred while generating suggestions' }, + { message: 'An error ocurred while generating suggestions' }, { status: 500 }, ); } diff --git a/src/app/api/videos/route.ts b/src/app/api/videos/route.ts index 7e8288b..6153490 100644 --- a/src/app/api/videos/route.ts +++ b/src/app/api/videos/route.ts @@ -49,7 +49,7 @@ export const POST = async (req: Request) => { if (body.chatModel?.provider === 'custom_openai') { llm = new ChatOpenAI({ - apiKey: getCustomOpenaiApiKey(), + openAIApiKey: getCustomOpenaiApiKey(), modelName: getCustomOpenaiModelName(), temperature: 0.7, configuration: { @@ -74,9 +74,9 @@ export const POST = async (req: Request) => { return Response.json({ videos }, { status: 200 }); } catch (err) { - console.error(`An error occurred while searching videos: ${err}`); + console.error(`An error ocurred while searching videos: ${err}`); return Response.json( - { message: 'An error occurred while searching videos' }, + { 
message: 'An error ocurred while searching videos' }, { status: 500 }, ); } diff --git a/src/app/api/weather/route.ts b/src/app/api/weather/route.ts deleted file mode 100644 index afaf8a6..0000000 --- a/src/app/api/weather/route.ts +++ /dev/null @@ -1,174 +0,0 @@ -export const POST = async (req: Request) => { - try { - const body: { - lat: number; - lng: number; - measureUnit: 'Imperial' | 'Metric'; - } = await req.json(); - - if (!body.lat || !body.lng) { - return Response.json( - { - message: 'Invalid request.', - }, - { status: 400 }, - ); - } - - const res = await fetch( - `https://api.open-meteo.com/v1/forecast?latitude=${body.lat}&longitude=${body.lng}¤t=weather_code,temperature_2m,is_day,relative_humidity_2m,wind_speed_10m&timezone=auto${ - body.measureUnit === 'Metric' ? '' : '&temperature_unit=fahrenheit' - }${body.measureUnit === 'Metric' ? '' : '&wind_speed_unit=mph'}`, - ); - - const data = await res.json(); - - if (data.error) { - console.error(`Error fetching weather data: ${data.reason}`); - return Response.json( - { - message: 'An error has occurred.', - }, - { status: 500 }, - ); - } - - const weather: { - temperature: number; - condition: string; - humidity: number; - windSpeed: number; - icon: string; - temperatureUnit: 'C' | 'F'; - windSpeedUnit: 'm/s' | 'mph'; - } = { - temperature: data.current.temperature_2m, - condition: '', - humidity: data.current.relative_humidity_2m, - windSpeed: data.current.wind_speed_10m, - icon: '', - temperatureUnit: body.measureUnit === 'Metric' ? 'C' : 'F', - windSpeedUnit: body.measureUnit === 'Metric' ? 'm/s' : 'mph', - }; - - const code = data.current.weather_code; - const isDay = data.current.is_day === 1; - const dayOrNight = isDay ? 
'day' : 'night'; - - switch (code) { - case 0: - weather.icon = `clear-${dayOrNight}`; - weather.condition = 'Clear'; - break; - - case 1: - weather.condition = 'Mainly Clear'; - case 2: - weather.condition = 'Partly Cloudy'; - case 3: - weather.icon = `cloudy-1-${dayOrNight}`; - weather.condition = 'Cloudy'; - break; - - case 45: - weather.condition = 'Fog'; - case 48: - weather.icon = `fog-${dayOrNight}`; - weather.condition = 'Fog'; - break; - - case 51: - weather.condition = 'Light Drizzle'; - case 53: - weather.condition = 'Moderate Drizzle'; - case 55: - weather.icon = `rainy-1-${dayOrNight}`; - weather.condition = 'Dense Drizzle'; - break; - - case 56: - weather.condition = 'Light Freezing Drizzle'; - case 57: - weather.icon = `frost-${dayOrNight}`; - weather.condition = 'Dense Freezing Drizzle'; - break; - - case 61: - weather.condition = 'Slight Rain'; - case 63: - weather.condition = 'Moderate Rain'; - case 65: - weather.condition = 'Heavy Rain'; - weather.icon = `rainy-2-${dayOrNight}`; - break; - - case 66: - weather.condition = 'Light Freezing Rain'; - case 67: - weather.condition = 'Heavy Freezing Rain'; - weather.icon = 'rain-and-sleet-mix'; - break; - - case 71: - weather.condition = 'Slight Snow Fall'; - case 73: - weather.condition = 'Moderate Snow Fall'; - case 75: - weather.condition = 'Heavy Snow Fall'; - weather.icon = `snowy-2-${dayOrNight}`; - break; - - case 77: - weather.condition = 'Snow'; - weather.icon = `snowy-1-${dayOrNight}`; - break; - - case 80: - weather.condition = 'Slight Rain Showers'; - case 81: - weather.condition = 'Moderate Rain Showers'; - case 82: - weather.condition = 'Heavy Rain Showers'; - weather.icon = `rainy-3-${dayOrNight}`; - break; - - case 85: - weather.condition = 'Slight Snow Showers'; - case 86: - weather.condition = 'Moderate Snow Showers'; - case 87: - weather.condition = 'Heavy Snow Showers'; - weather.icon = `snowy-3-${dayOrNight}`; - break; - - case 95: - weather.condition = 'Thunderstorm'; - 
weather.icon = `scattered-thunderstorms-${dayOrNight}`; - break; - - case 96: - weather.condition = 'Thunderstorm with Slight Hail'; - case 99: - weather.condition = 'Thunderstorm with Heavy Hail'; - weather.icon = 'severe-thunderstorm'; - break; - - default: - weather.icon = `clear-${dayOrNight}`; - weather.condition = 'Clear'; - break; - } - - return Response.json(weather); - } catch (err) { - console.error('An error occurred while getting home widgets', err); - return Response.json( - { - message: 'An error has occurred.', - }, - { - status: 500, - }, - ); - } -}; diff --git a/src/app/c/[chatId]/page.tsx b/src/app/c/[chatId]/page.tsx index 672107a..aac125a 100644 --- a/src/app/c/[chatId]/page.tsx +++ b/src/app/c/[chatId]/page.tsx @@ -1,17 +1,9 @@ -'use client'; - import ChatWindow from '@/components/ChatWindow'; -import { useParams } from 'next/navigation'; import React from 'react'; -import { ChatProvider } from '@/lib/hooks/useChat'; -const Page = () => { - const { chatId }: { chatId: string } = useParams(); - return ( - - - - ); +const Page = ({ params }: { params: Promise<{ chatId: string }> }) => { + const { chatId } = React.use(params); + return ; }; export default Page; diff --git a/src/app/discover/page.tsx b/src/app/discover/page.tsx index 8e20e50..eb7de7f 100644 --- a/src/app/discover/page.tsx +++ b/src/app/discover/page.tsx @@ -4,7 +4,6 @@ import { Search } from 'lucide-react'; import { useEffect, useState } from 'react'; import Link from 'next/link'; import { toast } from 'sonner'; -import { cn } from '@/lib/utils'; interface Discover { title: string; @@ -13,66 +12,60 @@ interface Discover { thumbnail: string; } -const topics: { key: string; display: string }[] = [ - { - display: 'Tech & Science', - key: 'tech', - }, - { - display: 'Finance', - key: 'finance', - }, - { - display: 'Art & Culture', - key: 'art', - }, - { - display: 'Sports', - key: 'sports', - }, - { - display: 'Entertainment', - key: 'entertainment', - }, -]; - const Page = () => { 
const [discover, setDiscover] = useState(null); const [loading, setLoading] = useState(true); - const [activeTopic, setActiveTopic] = useState(topics[0].key); - - const fetchArticles = async (topic: string) => { - setLoading(true); - try { - const res = await fetch(`/api/discover?topic=${topic}`, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - }, - }); - - const data = await res.json(); - - if (!res.ok) { - throw new Error(data.message); - } - - data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail); - - setDiscover(data.blogs); - } catch (err: any) { - console.error('Error fetching data:', err.message); - toast.error('Error fetching data'); - } finally { - setLoading(false); - } - }; useEffect(() => { - fetchArticles(activeTopic); - }, [activeTopic]); + const fetchData = async () => { + try { + const res = await fetch(`/api/discover`, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + }, + }); - return ( + const data = await res.json(); + + if (!res.ok) { + throw new Error(data.message); + } + + data.blogs = data.blogs.filter((blog: Discover) => blog.thumbnail); + + setDiscover(data.blogs); + } catch (err: any) { + console.error('Error fetching data:', err.message); + toast.error('Error fetching data'); + } finally { + setLoading(false); + } + }; + + fetchData(); + }, []); + + return loading ? ( +
+ +
+ ) : ( <>
@@ -83,73 +76,35 @@ const Page = () => {
-
- {topics.map((t, i) => ( -
setActiveTopic(t.key)} - > - {t.display} -
- ))} -
- - {loading ? ( -
- -
- ) : ( -
- {discover && - discover?.map((item, i) => ( - - {item.title} -
-
- {item.title.slice(0, 100)}... -
-

- {item.content.slice(0, 100)}... -

+
+ {discover && + discover?.map((item, i) => ( + + {item.title} +
+
+ {item.title.slice(0, 100)}...
- - ))} -
- )} +

+ {item.content.slice(0, 100)}... +

+
+ + ))} +
); diff --git a/src/app/globals.css b/src/app/globals.css index 6bdc1a8..f75daca 100644 --- a/src/app/globals.css +++ b/src/app/globals.css @@ -11,11 +11,3 @@ display: none; } } - -@media screen and (-webkit-min-device-pixel-ratio: 0) { - select, - textarea, - input { - font-size: 16px !important; - } -} diff --git a/src/app/manifest.ts b/src/app/manifest.ts deleted file mode 100644 index 792e752..0000000 --- a/src/app/manifest.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type { MetadataRoute } from 'next'; - -export default function manifest(): MetadataRoute.Manifest { - return { - name: 'Perplexica - Chat with the internet', - short_name: 'Perplexica', - description: - 'Perplexica is an AI powered chatbot that is connected to the internet.', - start_url: '/', - display: 'standalone', - background_color: '#0a0a0a', - theme_color: '#0a0a0a', - screenshots: [ - { - src: '/screenshots/p1.png', - form_factor: 'wide', - sizes: '2560x1600', - }, - { - src: '/screenshots/p2.png', - form_factor: 'wide', - sizes: '2560x1600', - }, - { - src: '/screenshots/p1_small.png', - form_factor: 'narrow', - sizes: '828x1792', - }, - { - src: '/screenshots/p2_small.png', - form_factor: 'narrow', - sizes: '828x1792', - }, - ], - icons: [ - { - src: '/icon-50.png', - sizes: '50x50', - type: 'image/png' as const, - }, - { - src: '/icon-100.png', - sizes: '100x100', - type: 'image/png', - }, - { - src: '/icon.png', - sizes: '440x440', - type: 'image/png', - purpose: 'any', - }, - ], - }; -} diff --git a/src/app/page.tsx b/src/app/page.tsx index 25981b5..e18aca9 100644 --- a/src/app/page.tsx +++ b/src/app/page.tsx @@ -1,5 +1,4 @@ import ChatWindow from '@/components/ChatWindow'; -import { ChatProvider } from '@/lib/hooks/useChat'; import { Metadata } from 'next'; import { Suspense } from 'react'; @@ -12,9 +11,7 @@ const Home = () => { return (
- - - +
); diff --git a/src/app/settings/page.tsx b/src/app/settings/page.tsx index 6fb8255..efe54d5 100644 --- a/src/app/settings/page.tsx +++ b/src/app/settings/page.tsx @@ -7,7 +7,6 @@ import { Switch } from '@headlessui/react'; import ThemeSwitcher from '@/components/theme/Switcher'; import { ImagesIcon, VideoIcon } from 'lucide-react'; import Link from 'next/link'; -import { PROVIDER_METADATA } from '@/lib/providers'; interface SettingsType { chatModelProviders: { @@ -21,10 +20,6 @@ interface SettingsType { anthropicApiKey: string; geminiApiKey: string; ollamaApiUrl: string; - ollamaApiKey: string; - lmStudioApiUrl: string; - deepseekApiKey: string; - aimlApiKey: string; customOpenaiApiKey: string; customOpenaiApiUrl: string; customOpenaiModelName: string; @@ -59,38 +54,6 @@ const Input = ({ className, isSaving, onSave, ...restProps }: InputProps) => { ); }; -interface TextareaProps extends React.InputHTMLAttributes { - isSaving?: boolean; - onSave?: (value: string) => void; -} - -const Textarea = ({ - className, - isSaving, - onSave, - ...restProps -}: TextareaProps) => { - return ( -
-