diff --git a/frontend/.eslintrc.json b/frontend/.eslintrc.json new file mode 100644 index 000000000..a42aec26b --- /dev/null +++ b/frontend/.eslintrc.json @@ -0,0 +1,7 @@ +{ + "extends": ["next/core-web-vitals"], + "rules": { + "@next/next/no-img-element": "off" + } +} + diff --git a/frontend/README.md b/frontend/README.md index 56347bab6..6c57e449e 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,3 +1,46 @@ -### Front End +### Frontend (Next.js + Tailwind) -Please populate this README with instructions on how to run the application! \ No newline at end of file +Welcome to the shiny chat UI for your FastAPI OpenAI backend. It streams responses in real time, lets you bring your own API key, and stays out of your way. + +## Quickstart + +1) Install deps +```bash +cd frontend +npm install +``` + +2) Configure backend URL (optional) +- The app defaults to `http://localhost:8000`. To change it, set `NEXT_PUBLIC_API_BASE_URL` in your shell before starting dev: +```bash +export NEXT_PUBLIC_API_BASE_URL="http://localhost:8000" +``` + +3) Run the dev server +```bash +npm run dev +# open http://localhost:3000 +``` + +4) Use your OpenAI API key +- Paste your API key into the input at the top of the page. It’s stored in `localStorage` only on your device for local dev. + +## Features +- App Router (Next 14) + TypeScript +- Tailwind for comfy styling +- Streaming UI wired to the backend `/api/chat` +- Model and system prompt inputs for quick experimentation + +## How it works +- The UI sends a POST to `${NEXT_PUBLIC_API_BASE_URL}/api/chat` with `{ developer_message, user_message, model, api_key }`. +- It reads the streaming response and renders tokens as they arrive. + +## Scripts +```bash +npm run dev # start dev server on :3000 +npm run build # build for production +npm run start # run production build on :3000 +npm run lint # run eslint +``` + +Have fun and ship something delightful. 
😎 \ No newline at end of file diff --git a/frontend/next-env.d.ts b/frontend/next-env.d.ts new file mode 100644 index 000000000..ef92dbe39 --- /dev/null +++ b/frontend/next-env.d.ts @@ -0,0 +1,5 @@ +/// <reference types="next" /> +/// <reference types="next/image-types/global" /> +/// <reference types="next/navigation-types/compat/navigation" /> +// NOTE: This file should not be edited + diff --git a/frontend/next.config.ts b/frontend/next.config.ts new file mode 100644 index 000000000..384bf20e9 --- /dev/null +++ b/frontend/next.config.ts @@ -0,0 +1,11 @@ +import type { NextConfig } from 'next' + +const nextConfig: NextConfig = { + reactStrictMode: true, + experimental: { + typedRoutes: true + } +} + +export default nextConfig + diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 000000000..e0cab671b --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,28 @@ +{ + "name": "frontend", + "version": "0.1.0", + "private": true, + "scripts": { + "dev": "next dev -p 3000", + "build": "next build", + "start": "next start -p 3000", + "lint": "next lint" + }, + "dependencies": { + "next": "14.2.5", + "react": "18.3.1", + "react-dom": "18.3.1" + }, + "devDependencies": { + "@types/node": "20.12.12", + "@types/react": "18.3.3", + "@types/react-dom": "18.3.0", + "autoprefixer": "10.4.19", + "eslint": "8.57.0", + "eslint-config-next": "14.2.5", + "postcss": "8.4.38", + "tailwindcss": "3.4.10", + "typescript": "5.4.5" + } +} + diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 000000000..2ce518bbc --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,7 @@ +module.exports = { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +} + diff --git a/frontend/src/app/globals.css b/frontend/src/app/globals.css new file mode 100644 index 000000000..347d73b13 --- /dev/null +++ b/frontend/src/app/globals.css @@ -0,0 +1,13 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +/* Global tweaks */ +:root { + --radius: 12px; +} + +* { box-sizing: border-box; } + +input, textarea { outline: none; } + diff --git 
a/frontend/src/app/layout.tsx b/frontend/src/app/layout.tsx new file mode 100644 index 000000000..0afa10e1f --- /dev/null +++ b/frontend/src/app/layout.tsx @@ -0,0 +1,20 @@ +import './globals.css' +import type { Metadata } from 'next' + +export const metadata: Metadata = { + title: 'OpenAI Chat UI', + description: 'Streamed chat UI for FastAPI OpenAI backend', +} + +export default function RootLayout({ + children, +}: { + children: React.ReactNode +}) { + return ( + <html lang="en"> + <body>{children}</body> + </html> + ) +} + diff --git a/frontend/src/app/page.tsx b/frontend/src/app/page.tsx new file mode 100644 index 000000000..dc00ef856 --- /dev/null +++ b/frontend/src/app/page.tsx @@ -0,0 +1,143 @@ +"use client" +import { useEffect, useRef, useState } from 'react' + +type Message = { + id: string + role: 'user' | 'assistant' + content: string +} + +export default function HomePage() { + const [apiKey, setApiKey] = useState(typeof window !== 'undefined' ? localStorage.getItem('OPENAI_API_KEY') || '' : '') + const [model, setModel] = useState('gpt-4.1-mini') + const [developerMessage, setDeveloperMessage] = useState('You are a helpful AI assistant.') + const [input, setInput] = useState('') + const [messages, setMessages] = useState<Message[]>([]) + const [isStreaming, setIsStreaming] = useState(false) + const streamBuffer = useRef('') + const listRef = useRef<HTMLDivElement>(null) + + useEffect(() => { + localStorage.setItem('OPENAI_API_KEY', apiKey) + }, [apiKey]) + + useEffect(() => { + listRef.current?.scrollTo({ top: listRef.current.scrollHeight, behavior: 'smooth' }) + }, [messages]) + + async function sendMessage() { + if (!apiKey) { + alert('Please provide your OpenAI API key.') + return + } + if (!input.trim() || isStreaming) return + + const userMsg: Message = { id: crypto.randomUUID(), role: 'user', content: input } + const assistantMsg: Message = { id: crypto.randomUUID(), role: 'assistant', content: '' } + setMessages(prev => [...prev, userMsg, assistantMsg]) + setInput('') + setIsStreaming(true) + 
streamBuffer.current = '' + + try { + const res = await fetch(`${process.env.NEXT_PUBLIC_API_BASE_URL || 'http://localhost:8000'}/api/chat`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + developer_message: developerMessage, + user_message: userMsg.content, + model, + api_key: apiKey, + }), + }) + + if (!res.ok || !res.body) { + throw new Error(`Request failed: ${res.status}`) + } + + const reader = res.body.getReader() + const decoder = new TextDecoder() + while (true) { + const { value, done } = await reader.read() + if (done) break + const chunk = decoder.decode(value, { stream: true }) + streamBuffer.current += chunk + setMessages(prev => prev.map(m => (m.id === assistantMsg.id ? { ...m, content: streamBuffer.current } : m))) + } + } catch (err: unknown) { + const message = err instanceof Error ? err.message : 'Unknown error' + setMessages(prev => prev.map(m => (m.id === assistantMsg.id ? { ...m, content: `Error: ${message}` } : m))) + } finally { + setIsStreaming(false) + } + } + + return ( +
+
+

OpenAI Chat

+
+ setApiKey(e.target.value)} + /> + setModel(e.target.value)} + /> + setDeveloperMessage(e.target.value)} + /> +
+
+ +
+ {messages.length === 0 ? ( +

Start a conversation below. Responses stream in real time.

+ ) : ( +
+ {messages.map(m => ( +
+
+ {m.content || (isStreaming && m.role === 'assistant' ? '…' : '')} +
+
+ ))} +
+ )} +
+ +