Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 34 additions & 1 deletion backend/app/api/routes/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,16 @@
from pydantic import BaseModel, Field
from sse_starlette.sse import EventSourceResponse

from app.services.llm import get_english_translation, get_response_stream_async
from app.services.llm import get_english_translation, get_response_stream_async, generate_chat_title_async
from app.services.embedding import embedding_service
from app.services.database import get_client
from app.core.rate_limit import limiter

router = APIRouter()
logger = logging.getLogger(__name__)

# Fallback chat-room title ("new conversation") returned whenever title
# generation is skipped (empty query) or fails (timeout / LLM error).
DEFAULT_CHAT_TITLE = "새로운 대화"

class HistoryMessage(BaseModel):
    # One prior conversation turn sent by the client alongside a new query.
    # role: speaker tag — presumably "user"/"ai" as emitted by the frontend;
    #       TODO confirm accepted values against the caller.
    role: str
    # content: the raw text of that turn.
    content: str
Expand All @@ -22,6 +24,9 @@ class ChatRequest(BaseModel):
query: str
history: List[HistoryMessage] = Field(default_factory=list)

class TitleRequest(BaseModel):
    # First user query of a chat session, used to generate the room title.
    # Length-capped to bound the size of the prompt sent to the LLM.
    query: str = Field(..., max_length=1024)

def _search_documents(query_vector):
return get_client().rpc(
'match_documents',
Expand Down Expand Up @@ -127,3 +132,31 @@ async def chat_endpoint(request: Request, chat_request: ChatRequest):
Endpoint for accepting chat queries and returning a text/event-stream response.
"""
return EventSourceResponse(generate_chat_events(request, chat_request.query, chat_request.history))

@router.post("/title")
@limiter.limit("5/minute")
async def chat_title_endpoint(request: Request, title_request: TitleRequest):
    """
    Generate a short chat room title from the user's first query.

    Returns ``{"title": <generated title>}``. Falls back to
    ``DEFAULT_CHAT_TITLE`` when the query is blank, when the LLM call
    exceeds its 10-second budget, or when generation fails for any reason
    — the endpoint never propagates an error to the client.

    Raises nothing to callers; all failures are logged and absorbed.
    """
    query = title_request.query.strip()
    if not query:
        return {"title": DEFAULT_CHAT_TITLE}

    try:
        # Hard timeout so a slow LLM cannot stall the first-message UX.
        title = await asyncio.wait_for(generate_chat_title_async(query), timeout=10.0)
        # The LLM may wrap its answer in quotes or ignore the length hint in
        # the prompt; strip quotes and hard-truncate defensively.
        title = title.replace('"', '').replace("'", "").strip()
        if not title:
            return {"title": DEFAULT_CHAT_TITLE}
        MAX_TITLE_LEN = 20
        ELLIPSIS = "..."
        if len(title) > MAX_TITLE_LEN:
            title = title[: MAX_TITLE_LEN - len(ELLIPSIS)] + ELLIPSIS
        return {"title": title}
    except asyncio.TimeoutError:
        logger.warning("Timeout generating chat title")
        return {"title": DEFAULT_CHAT_TITLE}
    except Exception:
        logger.exception("Failed to generate chat title")
        return {"title": DEFAULT_CHAT_TITLE}
16 changes: 16 additions & 0 deletions backend/app/services/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,3 +120,19 @@ async def get_response_stream_async(context: str, query: str, history: str = "")
chain = prompt | get_llm() | StrOutputParser()
async for chunk in chain.astream({"context": context, "chat_history": history, "query": query}):
yield chunk

# Prompt asking the model (in Korean) for a philosophical chat-room title of
# at most 15 characters, with no extra commentary — the title only.
# (The /title endpoint additionally truncates to 20 chars defensively.)
title_prompt = PromptTemplate.from_template(
    """주어진 질문을 기반으로 철학적인 대화방 제목을 15자 이내로 지어줘.
부연 설명 없이 제목만 출력해.

질문: {query}
제목: """
)

async def generate_chat_title_async(query: str) -> str:
    """
    Produce a short chat-room title for the user's first query.

    Pipes the query through ``title_prompt`` into the configured LLM and
    parses the output to a plain string; surrounding whitespace is removed.
    """
    pipeline = title_prompt | get_llm() | StrOutputParser()
    raw_title: str = await pipeline.ainvoke({"query": query})
    return raw_title.strip()
134 changes: 67 additions & 67 deletions backend/data/books_mapping.json

Large diffs are not rendered by default.

10 changes: 7 additions & 3 deletions backend/scripts/generate_book_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,11 +111,15 @@ async def translate_book_info(file_name: str) -> dict:
current_key_idx += 1
else:
print(f"Failed to parse LLM translation for {file_name}: {e}")
current_key_idx += 1
break

# If all keys exhausted or other error, fallback
print(f"LLM Failed for {file_name}, falling back to Kyobo Search...")
return await kyobo_fallback(file_name, "")
name_without_ext = Path(file_name).stem
parts = name_without_ext.rsplit(" by ", 1)
fallback_title = parts[0].strip()
fallback_author = parts[1].strip() if len(parts) == 2 else ""
return await kyobo_fallback(fallback_title, fallback_author)

async def search_aladin(title: str, author: str) -> dict:
if not ALADIN_API_KEY:
Expand All @@ -139,7 +143,7 @@ def fetch():
item = items[0]
return {
"title": item.get("title", ""),
"link": item.get("link", ""),
"link": item.get("link", "").replace("&", "&"),
"thumbnail": item.get("cover", ""),
"author": item.get("author", ""),
"isbn": item.get("isbn13", "")
Expand Down
4 changes: 2 additions & 2 deletions backend/scripts/generate_sql_updates.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@ def generate_sql():
# 1. Create a B-Tree index on the title field to make string matching instant
# instead of doing a full sequential table scan
sql_statements = [
"CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_documents_book_title ON documents ((metadata->'book_info'->>'title'));\n\n",
"BEGIN;\n",
"CREATE INDEX IF NOT EXISTS idx_documents_book_title ON documents ((metadata->'book_info'->>'title'));\n",
"SET statement_timeout = '120s'; -- Increase timeout to be safe\n"
"SET LOCAL statement_timeout = '120s'; -- Increase timeout to be safe for this transaction\n"
]

for book in mapping_data:
Expand Down
6 changes: 5 additions & 1 deletion backend/scripts/update_db_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,10 @@ def update_database():
doc_id = doc['id']
metadata = doc['metadata']

if not isinstance(metadata, dict):
print(f"Skipping doc {doc_id}: metadata is not a dict")
continue

# The DB stores the title we want to match inside metadata->book_info->title
db_title = metadata.get('book_info', {}).get('title', '')

Comment thread
coderabbitai[bot] marked this conversation as resolved.
Expand Down Expand Up @@ -119,7 +123,7 @@ def update_doc(doc):
return True
except Exception as e:
if attempt < max_retries - 1:
sleep(0.5 * (attempt + 1)) # Exponential backoff
sleep(0.5 * (2 ** attempt)) # Exponential backoff
continue
print(f"Error updating {doc['id']}: {e}")
return False
Expand Down
6 changes: 3 additions & 3 deletions backend/update_metadata.sql
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
CREATE INDEX IF NOT EXISTS idx_documents_book_title ON documents ((metadata->'book_info'->>'title'));
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_documents_book_title ON documents ((metadata->'book_info'->>'title'));


SET statement_timeout = '120s'; -- Increase timeout to be safe

BEGIN;

SET LOCAL statement_timeout = '120s'; -- Increase timeout to be safe for this transaction

UPDATE documents
SET metadata = metadata || '{"kr_title": "역설의 예산 1권", "thumbnail": "", "link": ""}'::jsonb
WHERE metadata->'book_info'->>'title' = 'Korean Translation of A Budget of Paradoxes Volume I';
Expand Down
31 changes: 27 additions & 4 deletions frontend/app/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@
import { useState, useCallback } from "react";
import { Sidebar } from "../components/sidebar/Sidebar";
import { ChatMain } from "../components/chat/ChatMain";
import { Message } from "../types/chat";
import { Message, DocumentMetadata } from "../types/chat";

export default function Home() {
const [messages, setMessages] = useState<Message[]>([]);
const [isSubmitting, setIsSubmitting] = useState(false);
const [isSidebarOpen, setIsSidebarOpen] = useState(false);
const [chatTitle, setChatTitle] = useState<string>("새로운 대화");
const [activeMetadata, setActiveMetadata] = useState<DocumentMetadata[]>([]);

const processLine = useCallback((line: string, eventObj: { current: string }, aiMsgId: string): boolean => {
if (line.startsWith("event: ")) {
Expand Down Expand Up @@ -67,13 +69,28 @@ export default function Home() {
setMessages((prev) => [...prev, newUserMsg, placeholderAiMsg]);
setIsSubmitting(true);

const isFirstMessage = messages.length === 0;
const baseUrl = process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000";

if (isFirstMessage) {
fetch(`${baseUrl}/api/v1/chat/title`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ query: query })
})
.then(res => res.json())
.then(data => {
if (data.title) setChatTitle(data.title);
})
.catch(err => console.error("Failed to fetch title:", err));
}

try {
const historyToSend = messages.slice(-10).map(msg => ({
role: msg.role,
content: msg.content
}));

const baseUrl = process.env.NEXT_PUBLIC_API_BASE_URL || "http://localhost:8000";
const res = await fetch(`${baseUrl}/api/v1/chat`, {
method: "POST",
headers: { "Content-Type": "application/json" },
Expand Down Expand Up @@ -152,13 +169,19 @@ export default function Home() {

return (
<div className="flex h-screen overflow-hidden relative">
<Sidebar messages={messages} isOpen={isSidebarOpen} onClose={() => setIsSidebarOpen(false)} />
<Sidebar messages={messages} activeMetadata={activeMetadata} isOpen={isSidebarOpen} onClose={() => setIsSidebarOpen(false)} />
<ChatMain
chatTitle={chatTitle}
messages={messages}
onSendMessage={handleSendMessage}
isSubmitting={isSubmitting}
onClearChat={() => setMessages([])}
onClearChat={() => {
setMessages([]);
setChatTitle("새로운 대화");
setActiveMetadata([]);
}}
onMenuClick={() => setIsSidebarOpen(true)}
onVisibleMessageChange={setActiveMetadata}
/>
</div>
);
Expand Down
10 changes: 6 additions & 4 deletions frontend/components/chat/ChatMain.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,19 @@ import { Share, Plus, Menu } from "lucide-react";
import { useRef, useEffect, useState } from "react";
import { MessageList } from "./MessageList";
import { FloatingInput } from "./FloatingInput";
import { Message } from "../../types/chat";
import { Message, DocumentMetadata } from "../../types/chat";

interface ChatMainProps {
messages: Message[];
chatTitle?: string;
onSendMessage: (query: string) => void;
isSubmitting: boolean;
onClearChat: () => void;
onMenuClick?: () => void;
onVisibleMessageChange?: (meta: DocumentMetadata[]) => void;
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}

export function ChatMain({ messages, onSendMessage, isSubmitting, onClearChat, onMenuClick }: ChatMainProps) {
export function ChatMain({ messages, chatTitle = "새로운 대화", onSendMessage, isSubmitting, onClearChat, onMenuClick, onVisibleMessageChange }: ChatMainProps) {
const messagesEndRef = useRef<HTMLDivElement>(null);
const [shouldAutoScroll, setShouldAutoScroll] = useState(true);
const [startTime, setStartTime] = useState<string>("");
Expand Down Expand Up @@ -51,7 +53,7 @@ export function ChatMain({ messages, onSendMessage, isSubmitting, onClearChat, o
<Menu className="w-5 h-5" />
</button>
<div>
<h2 className="font-display text-xl md:text-3xl text-white/90">미덕에 관한 대화</h2>
<h2 className="font-display text-xl md:text-3xl text-white/90 transition-all duration-300">{chatTitle}</h2>
<p className="text-xs md:text-sm text-white/40 mt-1">세션 시작: {mounted ? startTime : ""}</p>
</div>
</div>
Expand All @@ -69,7 +71,7 @@ export function ChatMain({ messages, onSendMessage, isSubmitting, onClearChat, o

{/* Scrollable Message Area */}
<div className="flex-1 overflow-y-auto w-full relative" onScroll={handleScroll}>
<MessageList messages={messages} />
<MessageList messages={messages} onVisibleMessageChange={onVisibleMessageChange} />
<div ref={messagesEndRef} />
</div>

Expand Down
2 changes: 1 addition & 1 deletion frontend/components/chat/FloatingInput.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ export function FloatingInput({ onSendMessage, isSubmitting }: FloatingInputProp
<textarea
id="chat-input"
className="w-full bg-transparent border-0 text-white/90 placeholder-white/30 focus:ring-0 p-0 resize-none font-sans leading-relaxed text-sm md:text-base max-h-32 outline-none"
placeholder="미덕, 형이상학 등에 대해 편하게 물어보세요..."
placeholder="당신의 고민이나 궁금한 점을 편하게 자유롭게 물어보세요..."
rows={1}
style={{ minHeight: "24px" }}
value={inputValue}
Expand Down
82 changes: 78 additions & 4 deletions frontend/components/chat/MessageList.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Sparkles, SquareArrowOutUpRight, ThumbsUp, Copy, RotateCcw } from "lucide-react";
import { useEffect, useRef } from "react";
import { Message, DocumentMetadata } from "../../types/chat";

const DUMMY_COVER_URL = "https://image.aladin.co.kr/product/dummy";
Expand All @@ -7,18 +8,91 @@ const DUMMY_BOOK_LINK = "https://www.aladin.co.kr/dummy-link";
interface Props {
messages: Message[];
onOpenCitation?: (meta: DocumentMetadata) => void;
onVisibleMessageChange?: (meta: DocumentMetadata[]) => void;
}

export function MessageList({ messages, onOpenCitation }: Props) {
export function MessageList({ messages, onOpenCitation, onVisibleMessageChange }: Props) {
const observer = useRef<IntersectionObserver | null>(null);
const visibleMessages = useRef<Map<string, number>>(new Map());

useEffect(() => {
if (!onVisibleMessageChange) return;
visibleMessages.current.clear();

observer.current = new IntersectionObserver((entries) => {
let changed = false;
entries.forEach(entry => {
const id = entry.target.getAttribute("data-message-id");
if (id) {
if (entry.isIntersecting) {
visibleMessages.current.set(id, entry.intersectionRatio);
} else {
visibleMessages.current.delete(id);
}
changed = true;
}
});

if (changed) {
let maxRatio = -1;
let mostVisibleId: string | null = null;
visibleMessages.current.forEach((ratio, id) => {
if (ratio > maxRatio) {
maxRatio = ratio;
mostVisibleId = id;
}
});

if (mostVisibleId) {
const emitLatestMetadataOrEmpty = () => {
const aiMessages = messages.filter(m => m.role === "ai" && m.metadata && m.metadata.length > 0);
if (aiMessages.length > 0) {
onVisibleMessageChange(aiMessages[aiMessages.length - 1].metadata!);
} else {
onVisibleMessageChange([]);
}
};

const msg = messages.find(m => m.id === mostVisibleId);
if (msg && msg.metadata && msg.metadata.length > 0) {
onVisibleMessageChange(msg.metadata);
} else {
emitLatestMetadataOrEmpty();
}
} else {
const emitLatestMetadataOrEmpty = () => {
const aiMessages = messages.filter(m => m.role === "ai" && m.metadata && m.metadata.length > 0);
if (aiMessages.length > 0) {
onVisibleMessageChange(aiMessages[aiMessages.length - 1].metadata!);
} else {
onVisibleMessageChange([]);
}
};
emitLatestMetadataOrEmpty();
}
}
}, {
threshold: [0, 0.25, 0.5, 0.75, 1.0]
});

const elements = document.querySelectorAll(".ai-message-card");
elements.forEach(el => { observer.current?.observe(el); });

return () => {
observer.current?.disconnect();
visibleMessages.current.clear();
};
}, [messages, onVisibleMessageChange]);
Comment thread
coderabbitai[bot] marked this conversation as resolved.

if (messages.length === 0) {
return (
<div className="w-full h-full flex flex-col items-center justify-center text-center p-8">
<div className="h-16 w-16 rounded-full bg-gradient-to-br from-primary/20 to-transparent border border-primary/30 flex items-center justify-center mb-6 shadow-xl">
<Sparkles className="text-primary w-8 h-8" />
</div>
<h3 className="font-display text-2xl text-white/90 mb-2">어떤 철학적 고민이 있으신가요?</h3>
<h3 className="font-display text-2xl text-white/90 mb-2">무엇이 당신을 사유하게 만드나요?</h3>
<p className="text-white/50 max-w-md mx-auto text-sm leading-relaxed">
미덕, 죽음, 사랑, 자아 등 삶의 본질적인 질문들을 과거의 위대한 철학자들과 함께 탐구해보세요.
크고 작은 고민부터 삶의 본질적인 질문까지, 위대한 철학자들의 지혜를 통해 새로운 관점을 발견해보세요.
</p>
</div>
);
Expand All @@ -40,7 +114,7 @@ export function MessageList({ messages, onOpenCitation }: Props) {
</div>
</div>
) : (
<div key={msg.id} className="flex gap-4 md:gap-6 group">
<div key={msg.id} data-message-id={msg.id} className="ai-message-card flex gap-4 md:gap-6 group">
<div className="shrink-0 flex flex-col items-center gap-3">
<div className="h-8 w-8 md:h-10 md:w-10 rounded-full bg-gradient-to-br from-[#1a1a1e] to-black border border-primary/30 flex items-center justify-center shadow-[0_0_15px_rgba(217,183,74,0.15)] relative">
<Sparkles className="text-primary w-4 h-4 md:w-5 md:h-5" />
Expand Down
Loading