diff --git a/docker-compose.yml b/docker-compose.yml
index e72959e..fece23f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -4,26 +4,26 @@ services:
container_name: redis-open3
restart: unless-stopped
# Uncomment this for development to expose Redis port
- # ports:
- # - "6379:6379"
+ ports:
+ - "6379:6379"
volumes:
- redis-data:/data
command: ["redis-server", "--save", "60", "1", "--loglevel", "warning"]
# Comment this out if you want to run a development environment locally
- web:
- build: .
- container_name: open3-web
- restart: unless-stopped
- ports:
- - "3000:3000"
- environment:
- - NODE_ENV=production
- - REDIS_URL=redis-open3:6379
- depends_on:
- - redis
- volumes:
- - upload-data:/app/public/uploads
+ # web:
+ # build: .
+ # container_name: open3-web
+ # restart: unless-stopped
+ # ports:
+ # - "3000:3000"
+ # environment:
+ # - NODE_ENV=production
+ # - REDIS_URL=redis-open3:6379
+ # depends_on:
+ # - redis
+ # volumes:
+ # - upload-data:/app/public/uploads
volumes:
redis-data:
diff --git a/package-lock.json b/package-lock.json
index a3d67d5..77e57f7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -24,6 +24,7 @@
"parse-numeric-range": "^1.3.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
+ "react-intersection-observer": "^9.16.0",
"react-markdown": "^10.1.0",
"rehype-highlight": "^7.0.2",
"rehype-raw": "^7.0.0",
@@ -5968,6 +5969,21 @@
"react": "^19.1.0"
}
},
+ "node_modules/react-intersection-observer": {
+ "version": "9.16.0",
+ "resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.16.0.tgz",
+ "integrity": "sha512-w9nJSEp+DrW9KmQmeWHQyfaP6b03v+TdXynaoA964Wxt7mdR3An11z4NNCQgL4gKSK7y1ver2Fq+JKH6CWEzUA==",
+ "license": "MIT",
+ "peerDependencies": {
+ "react": "^17.0.0 || ^18.0.0 || ^19.0.0",
+ "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0"
+ },
+ "peerDependenciesMeta": {
+ "react-dom": {
+ "optional": true
+ }
+ }
+ },
"node_modules/react-is": {
"version": "16.13.1",
"dev": true,
diff --git a/package.json b/package.json
index bd03c8b..4cf39d1 100644
--- a/package.json
+++ b/package.json
@@ -37,6 +37,7 @@
"parse-numeric-range": "^1.3.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
+ "react-intersection-observer": "^9.16.0",
"react-markdown": "^10.1.0",
"rehype-highlight": "^7.0.2",
"rehype-raw": "^7.0.0",
@@ -57,4 +58,4 @@
"tailwindcss": "^4",
"typescript": "^5"
}
-}
\ No newline at end of file
+}
diff --git a/src/app/api/chat/[id]/messages/route.ts b/src/app/api/chat/[id]/messages/route.ts
index 73f16eb..9d03605 100644
--- a/src/app/api/chat/[id]/messages/route.ts
+++ b/src/app/api/chat/[id]/messages/route.ts
@@ -1,10 +1,18 @@
import { NextRequest, NextResponse } from "next/server";
import { auth, currentUser } from "@clerk/nextjs/server";
-import redis, { CHAT_GENERATING_KEY, CHAT_MESSAGES_KEY, USER_CHATS_KEY } from "@/internal-lib/redis";
+import redis, { CHAT_MESSAGES_KEY, USER_CHATS_KEY } from "@/internal-lib/redis";
import { Message } from "@/app/lib/types/ai";
import { ApiError } from "@/internal-lib/types/api";
-export async function GET(_: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+export interface ChatMessagesResponse {
+ messages: Message[];
+ total: number;
+ page: number;
+ limit: number;
+ hasMore: boolean;
+}
+
+export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
if (!redis) {
return NextResponse.json({ error: "Redis connection failure" } as ApiError, { status: 500 });
}
@@ -13,17 +21,50 @@ export async function GET(_: NextRequest, { params }: { params: Promise<{ id: st
if (!user) return NextResponse.json({ error: "Unauthorized" } as ApiError, { status: 401 });
if (!user.userId) return NextResponse.json({ error: "Unauthorized" } as ApiError, { status: 401 });
+ // Pagination parameters
+ const page = parseInt(req.nextUrl.searchParams.get("page") || "1");
+ const limit = parseInt(req.nextUrl.searchParams.get("limit") || "25");
+ const reverse = req.nextUrl.searchParams.get("reverse") === "true";
+ if (!Number.isInteger(page) || page < 1) {
+ return NextResponse.json({ error: "Page must be greater than 0" } as ApiError, { status: 400 });
+ }
+ if (!Number.isInteger(limit) || limit < 1 || limit > 100) {
+ return NextResponse.json({ error: "Limit must be between 1 and 100" } as ApiError, { status: 400 });
+ }
+ const startIndex = (page - 1) * limit;
+ const endIndex = startIndex + limit - 1;
+
const { id } = await params;
const chatExists = await redis.hexists(USER_CHATS_KEY(user.userId), id);
if (!chatExists) {
return NextResponse.json({ error: "Chat not found" } as ApiError, { status: 404 });
}
- const isGenerating = await redis.get(CHAT_GENERATING_KEY(id));
-
try {
+ const total = await redis.llen(CHAT_MESSAGES_KEY(id));
let messageStrings: string[] = [];
- messageStrings = await redis.lrange(CHAT_MESSAGES_KEY(id), 0, -1);
+
+ if (reverse) {
+ // Reverse pagination: newest messages first
+ // Calculate indices from the end
+ const reverseStart = total - (page * limit);
+ const reverseEnd = total - ((page - 1) * limit) - 1;
+ // Clamp indices to valid range
+ const start = Math.max(reverseStart, 0);
+ const end = reverseEnd; // leave unclamped: a negative end means this page is past the oldest message, so start <= end below is false and we return []
+
+ // Redis lrange is inclusive, so ensure start <= end
+ if (start <= end) {
+ messageStrings = await redis.lrange(CHAT_MESSAGES_KEY(id), start, end);
+ } else {
+ messageStrings = [];
+ }
+ // Since lrange returns oldest-to-newest, reverse to get newest-to-oldest
+ messageStrings = messageStrings.reverse();
+ } else {
+ // Normal pagination: oldest messages first
+ messageStrings = await redis.lrange(CHAT_MESSAGES_KEY(id), startIndex, endIndex);
+ }
const messages: Message[] = messageStrings.map(msgStr => {
try {
@@ -33,10 +74,14 @@ export async function GET(_: NextRequest, { params }: { params: Promise<{ id: st
}
}).filter(Boolean);
+ // console.log(`Retrieved ${messages.length} messages for chat ${id} on page ${page} with limit ${limit}. Total messages: ${total} hasMore: ${total > endIndex + 1}`);
return NextResponse.json({
messages,
- generating: !!isGenerating,
- }, { status: 200 });
+ total,
+ page,
+ limit,
+ hasMore: total > endIndex + 1,
+ } as ChatMessagesResponse, { status: 200 });
} catch (error) {
console.error("Failed to retrieve messages:", error);
return NextResponse.json({ error: "Failed to retrieve messages" } as ApiError, { status: 500 });
diff --git a/src/app/api/chat/[id]/route.ts b/src/app/api/chat/[id]/route.ts
index f125b21..3ba641f 100644
--- a/src/app/api/chat/[id]/route.ts
+++ b/src/app/api/chat/[id]/route.ts
@@ -1,7 +1,7 @@
import { Message } from "@/app/lib/types/ai";
import { NextRequest, NextResponse } from "next/server";
import { auth, currentUser } from "@clerk/nextjs/server";
-import redis, { USER_CHATS_KEY, USER_CHATS_INDEX_KEY, CHAT_MESSAGES_KEY, USER_FILES_KEY, MESSAGE_STREAM_KEY } from "@/internal-lib/redis";
+import redis, { USER_CHATS_KEY, USER_CHATS_INDEX_KEY, CHAT_MESSAGES_KEY, USER_FILES_KEY, MESSAGE_STREAM_KEY, USER_PINNED_CHATS_KEY } from "@/internal-lib/redis";
import { join } from "path";
import { unlink } from "fs/promises";
import { ApiError } from "@/internal-lib/types/api";
@@ -10,6 +10,7 @@ import { ApiError } from "@/internal-lib/types/api";
interface ChatResponse {
id: string;
label: string;
+ pinned?: boolean;
model: string;
provider: string;
history: Message[];
@@ -45,6 +46,58 @@ export async function GET(_: NextRequest, { params }: { params: Promise<{ id: st
} as ChatResponse, { status: 200 });
}
+export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+ if (!redis) {
+ return NextResponse.json({ error: "Redis connection failure" } as ApiError, { status: 500 });
+ }
+
+ try {
+ const updateBody = await req.json() as {
+ label?: string;
+ pinned?: boolean;
+ };
+
+ // Check if no fields are provided in the body
+ if (updateBody.label === undefined && updateBody.pinned === undefined) {
+ return NextResponse.json({ error: "At least one field is required (label, pinned)" } as ApiError, { status: 400 });
+ }
+
+ const user = await currentUser();
+ if (!user) return NextResponse.json({ error: "Unauthorized" } as ApiError, { status: 401 });
+ if (user.banned) return NextResponse.json({ error: "Unauthorized" } as ApiError, { status: 401 });
+
+ const { id } = await params;
+
+ const rawChat = await redis.hget(USER_CHATS_KEY(user.id), id);
+ if (!rawChat) return NextResponse.json({ error: "Chat not found" } as ApiError, { status: 404 });
+ const chat = JSON.parse(rawChat);
+
+ // If label is provided, update label
+ if (updateBody.label !== undefined && updateBody.label.trim() !== "") {
+ if (updateBody.label.trim().length > 100) {
+ return NextResponse.json({ error: "Label is too long, maximum length is 100 characters" } as ApiError, { status: 400 });
+ }
+ chat.label = updateBody.label.trim();
+ }
+ // If pinned is provided, update pinned
+ if (updateBody.pinned !== undefined) {
+ if (updateBody.pinned) {
+ await redis.zadd(USER_PINNED_CHATS_KEY(user.id), Date.now(), id);
+ } else {
+ await redis.zrem(USER_PINNED_CHATS_KEY(user.id), id);
+ }
+ chat.pinned = updateBody.pinned;
+ }
+
+ // Update the chat
+ await redis.hset(USER_CHATS_KEY(user.id), id, JSON.stringify(chat));
+ return NextResponse.json({ success: "Updated chat successfully" }, { status: 200 });
+ } catch (error) {
+ console.error("Unknown error occurred while updating a chat: ", (error as Error).message);
+ return NextResponse.json({ error: "An unknown error occurred" } as ApiError, { status: 500 });
+ }
+}
+
export async function DELETE(_: NextRequest, { params }: { params: Promise<{ id: string }> }) {
if (!redis) {
return NextResponse.json({ error: "Redis connection failure" } as ApiError, { status: 500 });
@@ -54,9 +107,9 @@ export async function DELETE(_: NextRequest, { params }: { params: Promise<{ id:
const user = await currentUser();
if (!user) return NextResponse.json({ error: "Unauthorized" } as ApiError, { status: 401 });
if (user.banned) return NextResponse.json({ error: "Unauthorized" } as ApiError, { status: 401 });
-
+
const { id } = await params;
-
+
// Delete all files belonging to this chat
const USER_FILES_KEY_CONST = USER_FILES_KEY(user.id);
const files = await redis.hgetall(USER_FILES_KEY_CONST);
@@ -76,24 +129,24 @@ export async function DELETE(_: NextRequest, { params }: { params: Promise<{ id:
console.error(`Failed to delete file ${randomName} for chat ${id}:`, error);
}
}
-
+
// Use a transaction to delete both chat data and messages
const result = await redis.multi()
.hdel(USER_CHATS_KEY(user.id), id)
.del(CHAT_MESSAGES_KEY(id))
.zrem(USER_CHATS_INDEX_KEY(user.id), id)
.exec();
-
+
// Clean the redis stream to prevent duplicates
await redis.del(MESSAGE_STREAM_KEY(id)).catch((err) => {
console.error("Failed to trim message stream:", err);
});
-
+
// Check if chat deletion was successful (first operation)
if (!result || result[0][1] === 0) {
return NextResponse.json({ error: "Failed to delete chat" } as ApiError, { status: 404 });
}
-
+
return NextResponse.json({ success: "Chat deleted" }, { status: 200 });
} catch (error) {
console.error("Error deleting chat:", error);
diff --git a/src/app/api/chat/[id]/send/route.ts b/src/app/api/chat/[id]/send/route.ts
index 0a29042..a5eefd8 100644
--- a/src/app/api/chat/[id]/send/route.ts
+++ b/src/app/api/chat/[id]/send/route.ts
@@ -70,7 +70,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
if (requestedModel !== chatJson.model || requestedProvider !== chatJson.provider) {
// If the requested model or provider does not match the chat's model/provider, update the chat
- const chatCopy = Object.assign({}, chatJson as any);
+ const chatCopy = { ...chatJson } as any;
chatCopy.model = chatModel;
chatCopy.provider = chatProvider;
delete chatCopy.id;
diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts
index 6c571c8..800ed89 100644
--- a/src/app/api/chat/route.ts
+++ b/src/app/api/chat/route.ts
@@ -2,7 +2,7 @@ import { NextRequest, NextResponse } from "next/server";
import { AVAILABLE_PROVIDERS } from "@/app/lib/types/ai";
import { Chat } from "@/app/lib/types/ai";
import { auth, currentUser } from "@clerk/nextjs/server";
-import redis, { USER_CHATS_INDEX_KEY, USER_CHATS_KEY } from "@/internal-lib/redis";
+import redis, { USER_CHATS_INDEX_KEY, USER_CHATS_KEY, USER_PINNED_CHATS_KEY } from "@/internal-lib/redis";
import "@/internal-lib/redis";
import { byokAvailable } from "@/internal-lib/utils/byok";
import { getChatClass } from "@/internal-lib/utils/getChatClass";
@@ -19,7 +19,7 @@ export async function GET(req: NextRequest) {
// Pagination parameters
const page = parseInt(req.nextUrl.searchParams.get("page") || "1");
- const limit = parseInt(req.nextUrl.searchParams.get("limit") || "50");
+ const limit = parseInt(req.nextUrl.searchParams.get("limit") || "25") + 1; // FIXME(review): the +1 leaks into the limit validation (a requested limit of 100 becomes 101 and is rejected) and into the echoed `limit` field — fetch the look-ahead row explicitly instead
if (page < 1) {
return NextResponse.json({ error: "Page must be greater than 0" } as ApiError, { status: 400 });
}
@@ -29,36 +29,91 @@ export async function GET(req: NextRequest) {
const startIndex = (page - 1) * limit;
const endIndex = startIndex + limit - 1;
- const chatIds = await redis.zrevrange(USER_CHATS_INDEX_KEY(user.userId), startIndex, endIndex);
- if (chatIds.length === 0) {
- return NextResponse.json({
- chats: [],
- total: 0,
- page,
- limit,
- hasMore: false
- }, { status: 200 });
+ // Fetch all pinned chat IDs (usually few)
+ const pinnedChatIds = await redis.zrevrange(USER_PINNED_CHATS_KEY(user.userId), 0, -1).catch((err) => {
+ console.error("Error fetching pinned chat IDs:", err);
+ return [] as string[];
+ });
+ const totalPinned = pinnedChatIds.length;
+
+ // Calculate how many pinned chats are on this page
+ let paginatedPinned: string[] = [];
+ let paginatedUnpinned: string[] = [];
+ if (startIndex < totalPinned) {
+ // This page includes some pinned chats
+ const pinnedStart = startIndex;
+ const pinnedEnd = Math.min(totalPinned - 1, endIndex);
+ paginatedPinned = pinnedChatIds.slice(pinnedStart, pinnedEnd + 1);
+ // If not enough pinned to fill the page, fill with unpinned
+ const unpinnedNeeded = limit - paginatedPinned.length;
+ if (unpinnedNeeded > 0) {
+ // Unpinned offset is always 0 for first page, or (startIndex - totalPinned) for later pages
+ const unpinnedOffset = Math.max(0, startIndex - totalPinned);
+ // Fetch a large enough window, filter out pinned, then slice for offset and count
+ const fetchWindow = (unpinnedOffset + unpinnedNeeded) * 3;
+ let allUnpinned: string[] = [];
+ let redisOffset = 0;
+ while (allUnpinned.length < unpinnedOffset + unpinnedNeeded) {
+ const batch = await redis.zrevrange(USER_CHATS_INDEX_KEY(user.userId), redisOffset, redisOffset + fetchWindow - 1).catch((err) => {
+ console.error("Error fetching chat IDs:", err);
+ return [] as string[];
+ });
+ if (batch.length === 0) break;
+ const filtered = batch.filter(id => !pinnedChatIds.includes(id));
+ allUnpinned = [...allUnpinned, ...filtered];
+ redisOffset += fetchWindow;
+ if (batch.length < fetchWindow) break;
+ }
+ paginatedUnpinned = allUnpinned.slice(unpinnedOffset, unpinnedOffset + unpinnedNeeded);
+ }
+ } else {
+ // This page is after all pinned chats, only unpinned
+ const unpinnedOffset = startIndex - totalPinned;
+ const fetchWindow = (unpinnedOffset + limit) * 3;
+ let allUnpinned: string[] = [];
+ let redisOffset = 0;
+ while (allUnpinned.length < unpinnedOffset + limit) {
+ const batch = await redis.zrevrange(USER_CHATS_INDEX_KEY(user.userId), redisOffset, redisOffset + fetchWindow - 1).catch((err) => {
+ console.error("Error fetching chat IDs:", err);
+ return [] as string[];
+ });
+ if (batch.length === 0) break;
+ const filtered = batch.filter(id => !pinnedChatIds.includes(id));
+ allUnpinned = [...allUnpinned, ...filtered];
+ redisOffset += fetchWindow;
+ if (batch.length < fetchWindow) break;
+ }
+ paginatedUnpinned = allUnpinned.slice(unpinnedOffset, unpinnedOffset + limit);
}
+ // Merge pinned and unpinned for this page
+ const paginatedChatIds = [...paginatedPinned, ...paginatedUnpinned];
// Get chat data from hash
- const rawChats = await redis.hmget(USER_CHATS_KEY(user.userId), ...chatIds);
+ const rawChats = await redis.hmget(USER_CHATS_KEY(user.userId), ...paginatedChatIds).catch((err) => {
+ console.error("Error fetching chat data:", err)
+ return [] as string[];
+ });
const chats = rawChats
.map((chatStr, i) => {
try {
return chatStr ? {
...JSON.parse(chatStr),
- id: chatIds[i],
+ id: paginatedChatIds[i],
} : null;
} catch (e) {
// This is gonna screw me over some day..
- console.error(`Failed to parse chat ${chatIds[i]}:`, e);
+ console.error(`Failed to parse chat ${paginatedChatIds[i]}:`, e);
return null;
}
})
.filter(Boolean); // remove nulls
// Get total count once (not paginated)
- const total = await redis.zcard(USER_CHATS_INDEX_KEY(user.userId));
+ const totalChats = await redis.zcard(USER_CHATS_INDEX_KEY(user.userId)).catch((err) => {
+ console.error("Error fetching total chat count:", err);
+ return 0;
+ });
+ const total = totalChats; // pinned chats are members of the main index, so its cardinality already counts everything
return NextResponse.json({
chats,
@@ -102,7 +157,7 @@ export async function POST(req: NextRequest) {
} as ChatResponse))
.zadd(USER_CHATS_INDEX_KEY(user.id), Date.now(), id)
.exec();
-
+
// Check for failure
if (!result || result.some(([err]) => err)) {
await redis.hdel(USER_CHATS_KEY(user.id), id);
@@ -114,7 +169,7 @@ export async function POST(req: NextRequest) {
console.error("Error creating chat:", error);
return NextResponse.json({ error: "Failed to create chat" } as ApiError, { status: 500 });
}
-
+
return NextResponse.json({
id,
model: chat.model,
diff --git a/src/app/chat/[id]/layout.tsx b/src/app/chat/[id]/layout.tsx
index 8ac1725..a91df9c 100644
--- a/src/app/chat/[id]/layout.tsx
+++ b/src/app/chat/[id]/layout.tsx
@@ -1,31 +1,54 @@
-import React from "react";
-import { cookies } from "next/headers";
+"use client";
+import React, { use, useEffect, useState } from "react";
import ModelProviderClientWrapper from "./ModelProviderClientWrapper";
-// Helper to fetch model/provider on the server
-async function fetchModelProvider(chatId: string) {
- // You may need to pass cookies/headers for auth if required
- const res = await fetch(`${process.env.NEXT_PUBLIC_APP_URL || ""}/api/chat/${chatId}`,
- { headers: { Cookie: cookies().toString() } }
- );
- if (!res.ok) return { model: null, provider: null };
- const data = await res.json();
- return {
- model: data.model || null,
- provider: data.provider || null,
+// Default values
+const DEFAULT_MODEL = "google/gemini-2.5-flash";
+const DEFAULT_PROVIDER = "openrouter";
+
+export default function ChatLayout({ children, params }: {
+ children: React.ReactNode,
+ params: Promise<{ id: string }>,
+}) {
+ // Try to get previous values from localStorage (client only)
+ const getInitialModel = () => {
+ if (typeof window !== "undefined") {
+ return localStorage.getItem("lastModel") || DEFAULT_MODEL;
+ }
+ return DEFAULT_MODEL;
};
-}
+ const getInitialProvider = () => {
+ if (typeof window !== "undefined") {
+ return localStorage.getItem("lastProvider") || DEFAULT_PROVIDER;
+ }
+ return DEFAULT_PROVIDER;
+ };
+
+ const { id } = use(params);
-export default async function ChatLayout({ children, params }: { children: React.ReactNode, params: Promise<{ id: string }> }) {
- const { model, provider } = await fetchModelProvider((await params).id);
+ const [model, setModel] = useState(getInitialModel);
+ const [provider, setProvider] = useState(getInitialProvider);
- if (!model || !provider) {
- return (
-
- Loading chat...
-
- );
- }
+ useEffect(() => {
+ async function fetchModelProvider(chatId: string) {
+ try {
+ const res = await fetch(`/api/chat/${chatId}`);
+ if (!res.ok) return;
+ const data = await res.json();
+ if (data.model) {
+ setModel(data.model);
+ localStorage.setItem("lastModel", data.model);
+ }
+ if (data.provider) {
+ setProvider(data.provider);
+ localStorage.setItem("lastProvider", data.provider);
+ }
+ } catch {
+ // Ignore errors, keep optimistic state
+ }
+ }
+ fetchModelProvider(id);
+ }, [id]);
return (
diff --git a/src/app/chat/[id]/page.tsx b/src/app/chat/[id]/page.tsx
index cbc822f..d14d141 100644
--- a/src/app/chat/[id]/page.tsx
+++ b/src/app/chat/[id]/page.tsx
@@ -1,6 +1,6 @@
"use client";
-import React, { useEffect, useRef, useState, useMemo, useCallback } from "react";
+import React, { useEffect, useRef, useState, useMemo, useCallback, useLayoutEffect } from "react";
import ChatInput from "@/app/components/ChatInput";
import Markdown from "react-markdown";
import remarkGfm from "remark-gfm";
@@ -11,83 +11,65 @@ import { ChunkResponse, Message } from "@/app/lib/types/ai";
import { escape } from "html-escaper";
import rehypeClassAll from "@/app/lib/utils/rehypeClassAll";
import { useParams } from "next/navigation";
-import { loadMessagesFromServer } from "@/app/lib/utils/messageUtils";
import Image from "next/image";
import { Protect, SignedOut } from "@clerk/nextjs";
import { useModelProvider } from "./ModelProviderContext";
+import useSWRInfinite from "swr/infinite";
+import { ChatMessagesResponse } from "@/app/api/chat/[id]/messages/route";
+import { ApiError } from "@/internal-lib/types/api";
+
+// Scrolling not at the bottom = show scroll to bottom button
+function isAtBottom(threshold = 16): boolean {
+ const scrollTop = window.scrollY || document.documentElement.scrollTop;
+ const scrollHeight = document.documentElement.scrollHeight;
+ const clientHeight = document.documentElement.clientHeight;
+
+ return scrollTop + clientHeight >= scrollHeight - threshold;
+}
+
+const SCROLL_TOP_THRESHOLD = 196; // px, adjust as needed for prefetching before top
+const PAGE_SIZE = 15;
export default function Chat() {
const { model, provider } = useModelProvider();
const params = useParams();
const tabId = params.id?.toString() ?? "";
- const [messages, setMessages] = useState([]);
- const messagesRef = useRef(null);
+ const [messagesEl, setMessagesEl] = useState(null);
const [generating, setGenerating] = useState(false);
- const [messagesLoading, setMessagesLoading] = useState(true);
const eventSourceRef = useRef(null);
- const [autoScroll, setAutoScroll] = useState(true);
- const programmaticScrollRef = useRef(false);
- const topSentinelRef = useRef(null);
const [streamError, setStreamError] = useState(null);
const [byokRequired, setByokRequired] = useState(false);
-
- const fetchMessages = useCallback(async () => {
- setMessagesLoading(true);
- const serverMessages = await loadMessagesFromServer(tabId);
- setMessagesLoading(false);
- return serverMessages;
- }, [tabId]);
-
- useEffect(() => {
- if (!tabId) return;
- async function loadInitial() {
- const serverMessages = await fetchMessages();
- setMessages(prev => {
- if (serverMessages.messages.length === prev.length) {
- return prev; // No new messages, return existing
- }
- return [prev, serverMessages.messages].flat();
- });
- // Instantly scroll to bottom after initial messages load
- setTimeout(() => {
- const messagesElement = messagesRef.current;
- if (messagesElement) {
- programmaticScrollRef.current = true;
- window.scrollTo({
- top: messagesElement.scrollHeight,
- behavior: "auto"
- });
- setTimeout(() => {
- programmaticScrollRef.current = false;
- }, 100);
- }
- }, 0);
- }
- loadInitial();
- }, [tabId, fetchMessages]);
-
- useEffect(() => {
- if (!messagesLoading && messages.length > 0 && autoScroll) {
- const messagesElement = messagesRef.current;
- if (messagesElement) {
- programmaticScrollRef.current = true;
- window.scrollTo({
- behavior: "smooth",
- top: messagesElement.scrollHeight,
- });
- setTimeout(() => {
- programmaticScrollRef.current = false;
- }, 100);
- }
+ const [optimisticSentUserMessage, setOptimisticSentUserMessage] = useState<Message | null>(null);
+ const [streamingMessageContent, setStreamingMessageContent] = useState("");
+
+ // TODO: Make use of `error`
+ const { data: pages = [], error, size, setSize, isValidating, isLoading, mutate } = useSWRInfinite(
+ (pageIndex, previousPage) => {
+ if (previousPage && !previousPage.hasMore) return null;
+ return `/api/chat/${tabId}/messages?page=${pageIndex + 1}&limit=${PAGE_SIZE}&reverse=true`;
+ },
+ (url) =>
+ fetch(url)
+ .then(res => res.json() as Promise<ChatMessagesResponse | ApiError>)
+ .then(json => {
+ if ("error" in json) throw Error(json.error);
+ return json;
+ }),
+ {
+ revalidateOnFocus: false,
+ revalidateOnReconnect: true,
+ keepPreviousData: true,
}
- }, [messages, messagesLoading, autoScroll]);
+ );
const localGenerating = useRef(generating);
const onSend = useCallback(async (message: string, attachments: { url: string; filename: string }[] = [], search: boolean, model: string, provider: string) => {
// Add user message optimistically to UI
const userMessage: Message = { role: "user", parts: [{ text: message }], attachments: attachments.length > 0 ? attachments : undefined };
- setMessages(prev => [...prev, userMessage]);
+ setOptimisticSentUserMessage(userMessage);
+ setStreamingMessageContent("");
+ setStreamError(null);
localGenerating.current = true;
setGenerating(true);
@@ -105,6 +87,8 @@ export default function Chat() {
setGenerating(false);
localGenerating.current = false;
setStreamError("Failed to send message");
+ setOptimisticSentUserMessage(null);
+ setStreamingMessageContent("");
return;
});
@@ -112,56 +96,108 @@ export default function Chat() {
setGenerating(false);
localGenerating.current = false;
setStreamError("Failed to send message");
+ setOptimisticSentUserMessage(null);
+ setStreamingMessageContent("");
return;
}
}, [tabId]);
// Regenerate handler for LLM responses
- const [regeneratingIdx, setRegeneratingIdx] = useState(null);
- const handleRegenerate = useCallback(async (idx: number) => {
- setRegeneratingIdx(idx);
- const deleteRes = await fetch(`/api/chat/${tabId}/messages/delete-from-index?fromIndex=${idx}`, { method: "DELETE" })
- .catch(() => {
+ const [regeneratingIdx, setRegeneratingIdx] = useState<{ pageIdx: number; messageIdx: number } | null>(null);
+ const handleRegenerate = useCallback(
+ async (pageIdx: number, msgIdx: number) => {
+ setGenerating(true);
+ // Find total messages from the first page (should be present in paginated response)
+ const total = pages[0]?.total;
+ if (typeof total !== "number") {
+ setStreamError("Total message count not available");
+ setGenerating(false);
+ return;
+ }
+
+ // Calculate the server index (oldest=0, newest=total-1)
+ const serverIndex = total - (pageIdx * PAGE_SIZE + msgIdx) - 1;
+
+ // Find the previous user message (should be at serverIndex-1)
+ const allMessages = pages.flatMap(page => page.messages);
+ const prevUserMsg = allMessages[pageIdx * PAGE_SIZE + msgIdx + 1] as Message | undefined; // allMessages is newest-first, so the preceding (older) message is the NEXT entry, not serverIndex - 1
+ if (!prevUserMsg || prevUserMsg.role !== "user") {
+ console.warn("No previous user message found for regeneration");
+ setRegeneratingIdx(null);
+ setGenerating(false);
+ return;
+ }
+
+ console.log("Regenerating message at server index:", serverIndex);
+ setStreamingMessageContent("");
+ setRegeneratingIdx({ pageIdx, messageIdx: msgIdx });
+ setStreamError(null);
+ const deleteRes = await fetch(`/api/chat/${tabId}/messages/delete-from-index?fromIndex=${serverIndex}`, { method: "DELETE" })
+ .catch(() => {
+ setRegeneratingIdx(null);
+ setStreamError("Failed to delete message for regeneration");
+ setGenerating(false);
+ return;
+ });
+ if (!deleteRes || !deleteRes.ok) {
setRegeneratingIdx(null);
setStreamError("Failed to delete message for regeneration");
+ setGenerating(false);
return;
- });
- if (!deleteRes || !deleteRes.ok) {
- setRegeneratingIdx(null);
- setStreamError("Failed to delete message for regeneration");
- return;
- }
+ }
- const prevUserMsg = messages[idx - 1];
- if (!prevUserMsg || (prevUserMsg?.role) !== "user") {
- setRegeneratingIdx(null);
- return;
- }
+ const response = await fetch(`/api/chat/${tabId}/regenerate?fromIndex=${serverIndex}`, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ }).catch(() => {
+ setRegeneratingIdx(null);
+ setStreamError("Failed to regenerate message");
+ setGenerating(false);
+ return;
+ });
- const response = await fetch(`/api/chat/${tabId}/regenerate?fromIndex=${idx}`, {
- method: "GET",
- headers: {
- "Content-Type": "application/json",
- },
- }).catch(() => {
- setRegeneratingIdx(null);
- setStreamError("Failed to regenerate message");
- return;
- });
+ if (!response || !response.ok) {
+ setRegeneratingIdx(null);
+ setStreamError("Failed to regenerate message");
+ setGenerating(false);
+ return;
+ }
- if (!response || !response.ok) {
- setRegeneratingIdx(null);
- setStreamError("Failed to regenerate message");
- return;
- }
+ // Now delete the messages after sending the request on the view
+ // Remove all messages after the to-be-regenerated AI message (but keep the user message) using mutate
+ setGenerating(false);
+ mutate(pages => {
+ if (!pages) return pages;
+ // Clone pages to avoid mutation
+ const newPages = pages.map(page => ({
+ ...page,
+ messages: [...page.messages],
+ }));
+
+ // Remove the message at the given pageIdx and msgIdx,
+ // and remove all messages/pages after it
+ if (
+ newPages[pageIdx] &&
+ newPages[pageIdx].messages &&
+ newPages[pageIdx].messages[msgIdx]
+ ) {
+ // Remove messages after msgIdx in the same page
+ newPages[pageIdx].messages = newPages[pageIdx].messages.slice(0, msgIdx);
+ // Remove all pages after pageIdx
+ newPages.length = pageIdx + 1;
+ // Optionally update total if present
+ if (typeof newPages[0].total === "number") {
+ // Recalculate total as sum of all messages
+ newPages[0].total = newPages.reduce((acc, page) => acc + page.messages.length, 0);
+ }
+ }
- // Now delete the messages after sending the request on the view
- setMessages(prev => {
- const newMessages = [...prev];
- newMessages.splice(idx, 1); // Remove the model message at idx
- return newMessages;
- });
- }, [messages, tabId]);
+ // Remove empty pages except the first one (to avoid empty UI)
+ return newPages.filter((page, idx) => idx === 0 || page.messages.length > 0);
+ });
+ }, [pages, tabId]);
useEffect(() => {
if (eventSourceRef.current) {
@@ -171,11 +207,13 @@ export default function Chat() {
const eventSource = new EventSource(`/api/stream?` + new URLSearchParams({ chat: tabId }).toString());
eventSourceRef.current = eventSource;
- const streamDoneEvent = (event: MessageEvent) => {
+ const streamDoneEvent = async (event: MessageEvent) => {
assistantMessage = "";
- reloadMessagesFromServerIfStateInvalid();
+ await mutate().catch(() => { });
setGenerating(false);
setRegeneratingIdx(null);
+ setOptimisticSentUserMessage(null);
+ setStreamingMessageContent("");
}
eventSource.addEventListener("stream-done", streamDoneEvent);
@@ -183,17 +221,22 @@ export default function Chat() {
assistantMessage = "";
setStreamError(event.data || "An error occurred");
setGenerating(false);
+ setStreamingMessageContent("");
}
eventSource.addEventListener("stream-error", streamErrorEvent);
let assistantMessage = "";
eventSource.onmessage = async (event) => {
+ setStreamError(null);
if (!localGenerating.current) {
// That means this client didn't start the generation, therefore reload the state first
- await reloadMessagesFromServerIfStateInvalid().catch(() => {
+ // Use mutate to reload messages from the server if state is invalid
+ await mutate().catch(() => {
setStreamError("Failed to reload messages from server");
setGenerating(false);
localGenerating.current = false;
+ setOptimisticSentUserMessage(null);
+ setStreamingMessageContent("");
return;
});
}
@@ -210,20 +253,12 @@ export default function Chat() {
if (!parsed.content) return; // Skip empty chunks
assistantMessage += parsed.content;
- setMessages(prev => {
- const newMessages = [...prev];
- const lastMessage = newMessages[newMessages.length - 1];
- if (lastMessage?.role === "model") {
- lastMessage.parts = [{ text: assistantMessage, annotations: parsed.urlCitations || [] }];
- return [...newMessages];
- } else {
- return [...newMessages, { role: "model", parts: [{ text: assistantMessage }] } as Message];
- }
- });
+ setStreamingMessageContent(assistantMessage);
} catch (e) {
console.error("Failed to parse chunk text:", e);
setStreamError("Failed to parse response chunk");
setGenerating(false);
+ setStreamingMessageContent("");
eventSource.close();
}
}
@@ -239,13 +274,18 @@ export default function Chat() {
eventSource.close();
assistantMessage = "";
setGenerating(false);
+ setOptimisticSentUserMessage(null);
+ setStreamingMessageContent("");
}, { once: true });
- eventSource.addEventListener("done", () => {
- reloadMessagesFromServerIfStateInvalid();
+ eventSource.addEventListener("done", async () => {
+ // Use mutate to reload messages from the server if state is invalid
eventSource.close();
assistantMessage = "";
setGenerating(false);
+ setOptimisticSentUserMessage(null);
+ setStreamingMessageContent("");
+ await mutate().catch(() => { });
}, { once: true });
return () => {
@@ -256,10 +296,10 @@ export default function Chat() {
eventSourceRef.current = null;
}
}
- }, [tabId, streamError]);
+ }, [tabId, streamError, mutate]);
- // Memorize initial search state from sessionStorage (lines 262-283 logic)
- const [initialSearch, setInitialSearch] = useState(undefined);
+ // Memorize previous web search state from sessionStorage (lines 262-283 logic)
+ const [webSearchInitiallyOn, setWebSearchInitiallyOn] = useState(undefined);
useEffect(() => {
const tempNewMsg = sessionStorage.getItem("temp-new-tab-msg");
if (tempNewMsg) {
@@ -276,7 +316,7 @@ export default function Chat() {
};
checkEventSource();
});
- setInitialSearch(!!parsedMsg.search);
+ setWebSearchInitiallyOn(!!parsedMsg.search);
sessionStorage.removeItem("temp-new-tab-msg");
waitUntilEventSource.then(() => {
onSend(parsedMsg.message, parsedMsg.attachments || [], parsedMsg.search || false, "", "");
@@ -284,135 +324,120 @@ export default function Chat() {
}
} catch { }
}
-
- let lastScrollY = window.scrollY;
- function handleScroll() {
- if (programmaticScrollRef.current) {
- lastScrollY = window.scrollY;
- return;
- }
- const messagesElement = messagesRef.current;
- if (!messagesElement) return;
- const currentScrollY = window.scrollY;
- const scrollPosition = currentScrollY + window.innerHeight;
- const bottomThreshold = messagesElement.scrollHeight - 35;
-
- if (currentScrollY < lastScrollY) {
- setAutoScroll(false);
- } else if (currentScrollY > lastScrollY) {
- if (scrollPosition >= bottomThreshold) {
- const messagesElement = messagesRef.current;
- if (messagesElement) {
- programmaticScrollRef.current = true;
- window.scrollTo({
- behavior: "instant",
- top: messagesElement.scrollHeight,
- });
- setTimeout(() => {
- programmaticScrollRef.current = false;
- }, 100);
- }
- setAutoScroll(true);
- }
- }
- lastScrollY = currentScrollY;
- }
- window.addEventListener("scroll", handleScroll);
- return () => window.removeEventListener("scroll", handleScroll);
}, [onSend, tabId]);
- const reloadMessagesFromServerIfStateInvalid = useCallback(async () => {
- const serverMessages = await loadMessagesFromServer(tabId);
- if (!messages[messages.length - 1] || messages[messages.length - 1]?.role !== "model") {
- setMessages(serverMessages.messages);
+ const [showScrollToBottom, setShowScrollToBottom] = useState(false);
+ const previousScrollHeightRef = useRef(0);
+ // Fix scroll behavior: only scroll when loading more at top or when at bottom
+ useLayoutEffect(() => {
+ const previousScrollHeight = previousScrollHeightRef.current;
+ const newScrollHeight = document.body.scrollHeight;
+ // If loading more messages at the top (pagination)
+ if (window.scrollY <= SCROLL_TOP_THRESHOLD && newScrollHeight > previousScrollHeight) {
+ // Preserve scroll position when loading more
+ const scrollDelta = newScrollHeight - previousScrollHeight;
+ window.scrollTo({
+ top: scrollDelta,
+ behavior: "auto",
+ });
}
- }, [tabId, messages]);
+ previousScrollHeightRef.current = newScrollHeight;
+ }, [pages]);
useEffect(() => {
- if (autoScroll) {
- setTimeout(() => {
- const event = new Event("scroll");
- window.dispatchEvent(event);
- }, 0);
+ const onScroll = () => {
+ setShowScrollToBottom(!isAtBottom());
+
+ // Make sure the scroll position is retained when loading more
+ if (window.scrollY <= SCROLL_TOP_THRESHOLD && !isValidating && pages[pages.length - 1]?.hasMore) {
+ previousScrollHeightRef.current = document.body.scrollHeight;
+ setSize(size + 1); // triggers useLayoutEffect on `pages` change
+ }
+ };
+
+ window.addEventListener("scroll", onScroll);
+ return () => window.removeEventListener("scroll", onScroll);
+ }, [size, setSize, isValidating, pages, tabId]);
+
+ const initiallyLoadedRef = useRef(false);
+ useEffect(() => {
+ if (initiallyLoadedRef.current) return;
+
+ // Scroll to bottom only once after messages load
+ if (!isLoading && pages.length > 0 && messagesEl) {
+ initiallyLoadedRef.current = true;
+ window.scrollTo({ top: document.body.scrollHeight, behavior: "instant" });
}
- }, [autoScroll]);
+ }, [isLoading, pages, tabId, messagesEl]);
- function handleStopAutoScroll() {
- setAutoScroll(false);
- }
+ // useLayoutEffect(() => {
+ // console.log(pages)
+ // }, [pages]);
- async function handleDeleteMessage(idx: number) {
- if (idx === 0) {
+ // Update handleDeleteMessage to accept pageIdx and msgIdx, and use the same server index calculation as regeneration
+ async function handleDeleteMessage(pageIdx: number, msgIdx: number) {
+ const total = pages[0]?.total;
+ if (typeof total !== "number") {
+ setStreamError("Total message count not available");
+ return;
+ }
+ const serverIndex = total - (pageIdx * PAGE_SIZE + msgIdx) - 1;
+ if (serverIndex === 0) {
// Delete the entire chat if the first message is deleted
await fetch(`/api/chat/${tabId}`, { method: "DELETE" });
// Redirect to home or another page after deletion
window.location.href = "/";
return;
}
- await fetch(`/api/chat/${tabId}/messages/delete-from-index?fromIndex=${idx}`, { method: "DELETE" });
- setMessages(messages.slice(0, idx));
+ await fetch(`/api/chat/${tabId}/messages/delete-from-index?fromIndex=${serverIndex}`, { method: "DELETE" });
+ mutate(pages => {
+ if (!pages) return pages;
+ // Clone pages to avoid mutation
+ const newPages = pages.map(page => ({
+ ...page,
+ messages: [...page.messages],
+ }));
+
+ // Remove the message at the given pageIdx and msgIdx,
+ // and remove all messages/pages after it
+ if (
+ newPages[pageIdx] &&
+ newPages[pageIdx].messages &&
+ newPages[pageIdx].messages[msgIdx]
+ ) {
+ // Remove messages after msgIdx in the same page
+ newPages[pageIdx].messages = newPages[pageIdx].messages.slice(0, msgIdx);
+ // Remove all pages after pageIdx
+ newPages.length = pageIdx + 1;
+ // Optionally update total if present
+ if (typeof newPages[0].total === "number") {
+ // Recalculate total as sum of all messages
+ newPages[0].total = newPages.reduce((acc, page) => acc + page.messages.length, 0);
+ }
+ }
+
+ // Remove empty pages except the first one (to avoid empty UI)
+ return newPages.filter((page, idx) => idx === 0 || page.messages.length > 0);
+ });
}
useEffect(() => {
- fetch("/api/byok/required").then(res => res.json()).then(data => {
- setByokRequired(data.required);
- if (data.required) {
- window.location.href = "/settings";
- }
- });
+ fetch("/api/byok/required")
+ .then(res => res.json())
+ .then((data) => {
+ setByokRequired(data.required);
+ if (data.required) {
+ window.location.href = "/settings";
+ }
+ });
}, []);
-
- // Fetch chat info (model/provider) on mount
- // useEffect(() => {
- // if (!tabId) return;
- // fetch(`/api/chat/${tabId}`)
- // .then(res => res.json())
- // .then data => {
- // if (data && data.model && data.provider) {
- // setModel(data.model);
- // setProvider(data.provider);
- // }
- // })
- // .catch(() => {
- // setModel(null);
- // setProvider(null);
- // });
- // }, [tabId]);
-
if (byokRequired) return null;
return (
<>
- {generating && autoScroll && (
-
- )}
-
-
- {!messagesLoading && (
- <>
- {messages.map((message, idx) => (
-
- ))}
- >
- )}
+
{streamError && (
Message generation failed. You can retry.
@@ -421,43 +446,100 @@ export default function Chat() {
className="bg-red-500 text-white px-4 py-2 rounded hover:bg-red-600 transition cursor-pointer mt-2"
onClick={() => {
setStreamError(null);
- if (messages.length > 1) handleRegenerate(messages.length - 1);
+ // Find last message's pageIdx and messageIdx
+ const lastPageIdx = pages.length - 1;
+ const lastMsgIdx = pages[lastPageIdx]?.messages.length - 1;
+ if (lastPageIdx >= 0 && lastMsgIdx >= 0) handleRegenerate(lastPageIdx, lastMsgIdx);
}}
>
Retry
)}
- {generating && (!messages[messages.length - 1] || messages[messages.length - 1]?.role !== "model") && (
-
-
-
-
-
-
-
- Why is this showing? Latency, reasoning and uploading files
-
-
-
+ {(() => {
+ const messages = pages.flatMap(p => p?.messages);
+ const lastMessage = messages?.[messages?.length - 1];
+ return (
+ (generating || regeneratingIdx) && !streamingMessageContent.trim() && lastMessage?.role === "user" && (
+
+
+
+
+
+
+
+ Why is this showing? Latency, reasoning and uploading files
+
+
+
+ )
+ )
+ })()}
+ {!isLoading && (
+ <>
+ {/* Streaming model message */}
+ {streamingMessageContent && (
+
+ )}
+ {/* Optimistic user message */}
+ {optimisticSentUserMessage && (
+
+ )}
+ {/* Render all messages from SWR */}
+ {pages.flatMap((messages, pageIdx) =>
+ messages.messages.map((message, msgIdx) => (
+
+ ))
+ )}
+ >
)}
+ {showScrollToBottom && (
+
+ )}
{model && provider && (
)}
@@ -575,7 +657,7 @@ const PreWithCopy = ({ node, className, children, ...props }: any) => {
);
};
-const MessageBubble = ({ message, index, onDelete, onRegenerate, regeneratingIdx }: { message: Message, index: number, onDelete?: (idx: number) => void, onRegenerate?: (idx: number) => void, regeneratingIdx?: number | null }) => {
+const MessageBubble = ({ message, index, pageIdx, msgIdx, onDelete, onRegenerate, regeneratingIdx }: { message: Message, index: number, pageIdx: number, msgIdx: number, onDelete?: (pageIdx: number, msgIdx: number) => void, onRegenerate?: (pageIdx: number, msgIdx: number) => void, regeneratingIdx?: { pageIdx: number; messageIdx: number } | null }) => {
const isUser = message?.role === "user";
const className = isUser
? "px-6 py-4 rounded-2xl mb-1 bg-white/[0.06] justify-self-end break-words max-w-full overflow-x-auto"
@@ -672,11 +754,11 @@ const MessageBubble = ({ message, index, onDelete, onRegenerate, regeneratingIdx
return (
setHovered(true)}
onMouseLeave={() => setHovered(false)}
>
-
+
{renderedMarkdown}
{/* Annotations rendering */}
{message.parts && message.parts[0]?.annotations && message.parts[0].annotations.length > 0 && (
@@ -707,7 +789,7 @@ const MessageBubble = ({ message, index, onDelete, onRegenerate, regeneratingIdx