Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
101 changes: 101 additions & 0 deletions app/api/upload/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import { headers } from "next/headers";
import { NextRequest, NextResponse } from "next/server";
import { revalidatePath } from "next/cache";

Comment thread
alvinsjoy marked this conversation as resolved.
// Base URL of the Python ML backend's v1 API; defaults to the local dev
// server and can be overridden via the BACKEND_API_BASE env var.
const BACKEND_API_BASE =
  process.env.BACKEND_API_BASE ?? "http://127.0.0.1:8000/api/v1";

/**
* Streaming upload proxy – authenticates the user, verifies chat
* ownership, then pipes the raw request body directly to the Python
* backend without buffering the file into Node.js memory.
*/
export async function POST(request: NextRequest) {
// 1. Authenticate
const session = await auth.api.getSession({
headers: await headers(),
});

if (!session) {
return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
}

// 2. Read chatId from query params
const chatId = request.nextUrl.searchParams.get("chatId");

Comment thread
alvinsjoy marked this conversation as resolved.
if (!chatId) {
return NextResponse.json(
{ error: "chatId query parameter is required" },
{ status: 400 },
);
}

// 3. Verify chat ownership
const chat = await prisma.chat.findUnique({
where: { id: chatId },
});

if (!chat || chat.userId !== session.user.id) {
return NextResponse.json(
{ error: "Chat not found or unauthorized" },
{ status: 403 },
);
}

// 4. Stream the request body directly to the Python backend.
// The verified chatId is passed as a query param so the backend
// derives the target chat solely from the server-verified value.
const contentType = request.headers.get("content-type");
const backendUrl = `${BACKEND_API_BASE}/ingest?chat_id=${encodeURIComponent(chatId)}`;

let backendResponse: Response;
try {
backendResponse = await fetch(backendUrl, {
method: "POST",
body: request.body,
headers: {
...(contentType ? { "Content-Type": contentType } : {}),
},
// @ts-expect-error -- Node 18+ supports duplex for streaming request bodies
duplex: "half",
});
} catch (error) {
console.error("Python ingestion transport error:", error);
return NextResponse.json({ error: "Backend unavailable" }, { status: 502 });
}

if (!backendResponse.ok) {
const errText = await backendResponse.text();
console.error("Python ingestion error:", errText);
return NextResponse.json(
{ error: "Backend processing failed", detail: errText },
Comment thread
alvinsjoy marked this conversation as resolved.
{ status: backendResponse.status },
);
}

let result;
try {
result = await backendResponse.json();
} catch {
console.error("Failed to parse Python backend response as JSON");
return NextResponse.json(
{ error: "Invalid response from backend" },
{ status: 502 },
);
}

// 5. Increment document count in Prisma
await prisma.chat.update({
where: { id: chatId },
data: {
documentCount: { increment: 1 },
updatedAt: new Date(),
},
});

revalidatePath("/chat");

return NextResponse.json({ success: true, result });
}
2 changes: 1 addition & 1 deletion app/layout.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ export default function RootLayout({
>
<TooltipProvider>{children}</TooltipProvider>
</ThemeProvider>
<Toaster />
<Toaster richColors />
</body>
</html>
);
Expand Down
6 changes: 3 additions & 3 deletions backend/api/v1/ingestion_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"""
import logging

from fastapi import APIRouter, HTTPException, UploadFile, File, Form, Depends
from fastapi import APIRouter, HTTPException, UploadFile, File, Query, Depends

from datetime import datetime
from ...schemas.ingestion import IngestResponse, IngestError
Expand Down Expand Up @@ -44,14 +44,14 @@ def get_service() -> IngestionService:
),
)
async def ingest_document(
chat_id: str = Form(..., description="UUID of the chat session"),
chat_id: str = Query(..., description="UUID of the chat session"),
file: UploadFile = File(..., description="Document file to ingest"),
service: IngestionService = Depends(get_service),
) -> IngestResponse:
"""
Ingest a document into a chat-specific collection.

- **chat_id**: UUID of the chat session (form field)
- **chat_id**: UUID of the chat session (query parameter)
- **file**: Document file (PDF, TXT, or MD)
Comment thread
alvinsjoy marked this conversation as resolved.

The document will be:
Expand Down
81 changes: 57 additions & 24 deletions components/chat-area.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import React, { useState, useRef, useEffect } from "react";
import { useRouter } from "next/navigation";

import { Send, FileUp, Paperclip, File, X } from "lucide-react";
import { Send, FileUp, Paperclip, File, X, Loader2 } from "lucide-react";
import { Button } from "@/components/ui/button";
import { Card } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
Expand All @@ -19,11 +19,7 @@ import {
} from "@/lib/date-format";
// Import shared types
import type { Chat as ChatModel, Message } from "@/lib/types";
import {
sendMessageAction,
uploadDocumentAction,
deleteDocumentAction,
} from "@/lib/actions/chat";
import { sendMessageAction, deleteDocumentAction } from "@/lib/actions/chat";
import { toast } from "sonner";
import { Greeting } from "@/components/greeting";

Expand All @@ -41,6 +37,7 @@ interface UploadedDocument {
name: string;
size: string;
filename: string;
uploading?: boolean;
}

export function ChatArea({
Expand Down Expand Up @@ -97,6 +94,9 @@ export function ChatArea({
}
} catch (error) {
console.error("Failed to send message:", error);
toast.error("Failed to send message", {
description: "Could not reach the server. Please try again.",
});
} finally {
setIsGenerating(false);
// Removed router.refresh() because the Server Action handles revalidation natively!
Expand All @@ -120,14 +120,14 @@ export function ChatArea({
const files = e.target.files;
if (!files || files.length === 0) return;

const MAX_SIZE_BYTES = 1 * 1024 * 1024; // 1 MB
const MAX_SIZE_BYTES = 50 * 1024 * 1024; // 50 MB

// Filter out oversized files and show a toast for each rejected one
const validFiles: File[] = [];
for (const file of Array.from(files)) {
if (file.size > MAX_SIZE_BYTES) {
toast.error(`"${file.name}" is too large`, {
description: `Files must be under 1 MB. This file is ${(file.size / (1024 * 1024)).toFixed(2)} MB.`,
description: `Files must be under ${(MAX_SIZE_BYTES / (1024 * 1024)).toFixed(2)} MB. This file is ${(file.size / (1024 * 1024)).toFixed(2)} MB.`,
});
} else {
validFiles.push(file);
Expand All @@ -148,6 +148,7 @@ export function ChatArea({
name: file.name,
size: (file.size / 1024).toFixed(2) + " KB",
filename: file.name,
uploading: true,
} satisfies UploadedDocument,
}));

Expand All @@ -156,25 +157,45 @@ export function ChatArea({
...uploadQueue.map((item) => item.optimisticDocument),
]);

// Upload to backend
// Upload to backend via streaming API route
for (const { file, optimisticDocument } of uploadQueue) {
const formData = new FormData();
formData.append("file", file);
formData.append("chat_id", currentChat?.id || "default-chat");

try {
const result = await uploadDocumentAction(formData);
console.log("File uploaded successfully:", result);
const uploadPromise = fetch(
`/api/upload?chatId=${encodeURIComponent(currentChat?.id || "default-chat")}`,
{ method: "POST", body: formData },
).then(async (response) => {
if (!response.ok) {
const err = await response.json().catch(() => ({}));
throw new Error(err.error || "Upload failed");
}
Comment on lines +170 to +173
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Preserve backend failure detail in toast errors.

Only err.error is surfaced; useful backend messages in detail are lost, so users get generic upload failures.

💡 Suggested tweak
-          if (!response.ok) {
-            const err = await response.json().catch(() => ({}));
-            throw new Error(err.error || "Upload failed");
-          }
+          if (!response.ok) {
+            const err = await response.json().catch(() => null);
+            const message =
+              err?.detail?.message ??
+              err?.detail ??
+              err?.error ??
+              `Upload failed (${response.status})`;
+            throw new Error(message);
+          }
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@components/chat-area.tsx` around lines 171 - 174, The current response error
handling in the upload flow (the block checking response.ok in
components/chat-area.tsx) only surfaces err.error and discards err.detail;
update the throw to include backend detail by reading both err.error and
err.detail (e.g., build a message like `${err.error || 'Upload
failed'}${err.detail ? ': ' + err.detail : ''}`) so the toast shows the backend
failure details; keep the existing fallback to "Upload failed" when neither
field exists and preserve the try/catch around response.json() that defaults to
{}.

return response.json();
});

// Trigger navigation refresh to update document count in layout
router.refresh();
} catch (error) {
console.error("Error uploading file:", error);
// Remove failed document from optimistic UI.
setDocuments((prev) =>
prev.filter((d) => d.id !== optimisticDocument.id),
);
toast.error(`Failed to upload "${file.name}"`);
toast.promise(uploadPromise, {
loading: `Uploading "${file.name}"...`,
success: () => {
setDocuments((prev) =>
prev.map((d) =>
d.id === optimisticDocument.id ? { ...d, uploading: false } : d,
),
);
router.refresh();
return `"${file.name}" uploaded successfully`;
},
error: (err) => {
setDocuments((prev) =>
prev.filter((d) => d.id !== optimisticDocument.id),
);
return `Failed to upload "${file.name}": ${err.message}`;
},
});

await uploadPromise;
} catch {
console.error("Failed to upload file");
}
}

Expand All @@ -193,6 +214,9 @@ export function ChatArea({
router.refresh();
} catch (error) {
console.error("Failed to delete document from collection:", error);
toast.error(`Failed to remove "${doc.name}"`, {
description: "Could not delete the document. Please try again.",
});
}
}
};
Expand Down Expand Up @@ -260,7 +284,11 @@ export function ChatArea({
key={doc.id}
className="group flex items-center gap-2 rounded-lg bg-accent px-3 py-1.5 text-sm"
>
<File className="size-4 text-muted-foreground" />
{doc.uploading ? (
<Loader2 className="size-4 text-muted-foreground animate-spin" />
) : (
<File className="size-4 text-muted-foreground" />
)}
<span className="max-w-50 truncate text-accent-foreground">
{doc.name}
</span>
Expand Down Expand Up @@ -321,7 +349,10 @@ export function ChatArea({
{showDateDivider && ts !== null && (
<div className="my-2 flex items-center gap-1">
<Separator className="flex-1" />
<span className="min-w-max text-xs font-semibold text-muted-foreground" suppressHydrationWarning>
<span
className="min-w-max text-xs font-semibold text-muted-foreground"
suppressHydrationWarning
>
{formatChatLongDate(ts)}
</span>
<Separator className="flex-1" />
Expand Down Expand Up @@ -378,7 +409,9 @@ export function ChatArea({
{message.role === "assistant" ? "AI" : "You"}
</span>
{ts !== null ? (
<span suppressHydrationWarning>{formatChatTime(ts)}</span>
<span suppressHydrationWarning>
{formatChatTime(ts)}
</span>
) : (
<span>{message.timestamp}</span>
)}
Expand Down
49 changes: 0 additions & 49 deletions lib/actions/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -137,57 +137,8 @@ export async function sendMessageAction(chatId: string, content: string) {
};
}

export async function uploadDocumentAction(formData: FormData) {
const session = await auth.api.getSession({
headers: await headers(),
});

if (!session) {
throw new Error("Unauthorized");
}

const chatId = formData.get("chat_id") as string;
const file = formData.get("file");

if (!chatId || !file) {
throw new Error("chat_id and file are required");
}

// Verify chat ownership
const chat = await prisma.chat.findUnique({
where: { id: chatId },
});

if (!chat || chat.userId !== session.user.id) {
throw new Error("Chat not found or unauthorized");
}

// Forward the file to Python ML backend
const response = await fetch(`${BACKEND_API_BASE}/ingest`, {
method: "POST",
body: formData,
});

if (!response.ok) {
const errText = await response.text();
console.error("Python ingestion error:", errText);
throw new Error(`Backend processing failed: ${errText}`);
}

const result = await response.json();

// Increment document count in Next.js Prisma
await prisma.chat.update({
where: { id: chatId },
data: {
documentCount: { increment: 1 },
updatedAt: new Date(),
},
});

revalidatePath("/chat");
return { success: true, result };
}

export async function deleteDocumentAction(chatId: string, filename: string) {
const session = await auth.api.getSession({
Expand Down