diff --git a/.agents/tools/ui/ai-chat-sidebar.md b/.agents/tools/ui/ai-chat-sidebar.md new file mode 100644 index 0000000000..4667555dc9 --- /dev/null +++ b/.agents/tools/ui/ai-chat-sidebar.md @@ -0,0 +1,460 @@ +--- +description: AI chat sidebar component architecture — design, state management, and integration patterns +mode: subagent +tools: + read: true + write: true + edit: true + bash: true + glob: true + grep: true + task: true +--- + +# AI Chat Sidebar — Component Architecture + + + +## Quick Reference + +- **Purpose**: Collapsible AI chat panel integrated into the aidevops dashboard +- **Stack**: React 19 + TypeScript + Tailwind CSS + Elysia (API) +- **State**: React Context with cookie persistence (matches existing sidebar patterns) +- **Streaming**: Server-Sent Events (SSE) from Elysia backend +- **Source**: `.opencode/ui/chat-sidebar/` + +**Sibling tasks**: + +| Task | Scope | Depends on | +|------|-------|------------| +| t005.1 | Architecture & types (this doc) | — | +| t005.2 | Collapsible panel, resize, toggle | t005.1 | +| t005.3 | Chat message UI, streaming, markdown | t005.1, t005.2 | +| t005.4 | AI backend integration, context, API routing | t005.1 | + + + +## Architecture Overview + +```text +┌─────────────────────────────────────────────────────────┐ +│ Dashboard Layout (Elysia serves SPA) │ +│ ┌──────────────────────┐ ┌──────────────────────────┐ │ +│ │ Main Content Area │ │ AI Chat Sidebar │ │ +│ │ │ │ ┌────────────────────┐ │ │ +│ │ (MCP Dashboard, │ │ │ Header + Controls │ │ │ +│ │ Quality Metrics, │ │ ├────────────────────┤ │ │ +│ │ etc.) 
│ │ │ Message List │ │ │ +│ │ │ │ │ ├─ UserMessage │ │ │ +│ │ │ │ │ ├─ AssistantMsg │ │ │ +│ │ │ │ │ └─ StreamingMsg │ │ │ +│ │ │ │ ├────────────────────┤ │ │ +│ │ │ │ │ Input Area │ │ │ +│ │ │ │ │ ├─ TextArea │ │ │ +│ │ │ │ │ └─ Send Button │ │ │ +│ │ │ │ └────────────────────┘ │ │ +│ └──────────────────────┘ └──────────────────────────┘ │ +│ [Toggle Button - fixed position when sidebar closed] │ +└─────────────────────────────────────────────────────────┘ +``` + +## Design Decisions + +### 1. React for the sidebar, vanilla for existing dashboard + +**Decision**: Introduce React only for the chat sidebar, not rewrite the existing dashboard. + +**Rationale**: The existing MCP dashboard (`.opencode/server/mcp-dashboard.ts`) uses inline HTML with vanilla JS and works well for its purpose (status cards, start/stop buttons). The chat sidebar requires complex interactive state (streaming messages, resize handles, conversation history, markdown rendering) that vanilla JS handles poorly. React is scoped to the sidebar mount point only. + +**Integration**: Elysia serves a new route (`/chat`) with a React SPA, or the sidebar is injected as a Web Component / iframe into the existing dashboard. The cleanest approach is a standalone React app served by Elysia at `/chat` that can also be embedded. + +### 2. React Context for state (not Redux/Zustand) + +**Decision**: Use React Context with the pattern from `tools/ui/react-context.md`. + +**Rationale**: The sidebar has 3 concerns — panel state (open/width), conversation state (messages/streaming), and settings (model, context). These map cleanly to 3 split contexts (per the performance guidance in react-context.md). The app is small enough that Context avoids unnecessary dependencies. Cookie persistence for panel state matches the existing pattern. + +### 3. SSE for streaming (not WebSocket) + +**Decision**: Use Server-Sent Events for AI response streaming. 
+ +**Rationale**: The chat is unidirectional streaming (server → client for AI responses). SSE is simpler than WebSocket for this use case, works through proxies/CDNs, and auto-reconnects. User messages are sent via POST. The existing dashboard already uses WebSocket for real-time MCP status updates — SSE avoids conflating the two concerns. + +### 4. Elysia API routes for backend + +**Decision**: Add chat API routes to the existing Elysia server (or a new Elysia instance on a separate port). + +**Rationale**: Elysia already handles the API gateway and MCP dashboard. Adding `/api/chat/*` routes keeps the backend unified. The chat backend proxies to the configured AI provider (Anthropic, OpenRouter, etc.) using the existing credential system. + +## File Structure + +```text +.opencode/ui/chat-sidebar/ +├── types.ts # Shared type definitions (t005.1) +├── constants.ts # Configuration constants (t005.1) +├── context/ +│ ├── sidebar-context.tsx # Panel open/close/width state (t005.2) +│ ├── chat-context.tsx # Conversation state + streaming (t005.3) +│ └── settings-context.tsx # Model selection, context config (t005.4) +├── components/ +│ ├── ChatSidebar.tsx # Root sidebar component (t005.2) +│ ├── ChatHeader.tsx # Title, model selector, close button (t005.2) +│ ├── MessageList.tsx # Scrollable message container (t005.3) +│ ├── ChatMessage.tsx # Individual message (user/assistant) (t005.3) +│ ├── StreamingMessage.tsx # In-progress streaming response (t005.3) +│ ├── ChatInput.tsx # Text input + send button (t005.3) +│ ├── ResizeHandle.tsx # Drag-to-resize sidebar width (t005.2) +│ └── ToggleButton.tsx # Floating button when sidebar closed (t005.2) +├── hooks/ +│ ├── use-chat.ts # Chat operations hook (t005.3/t005.4) +│ ├── use-streaming.ts # SSE streaming hook (t005.3) +│ └── use-resize.ts # Resize drag handler (t005.2) +├── lib/ +│ ├── api-client.ts # Chat API client (t005.4) +│ ├── markdown.ts # Markdown rendering utilities (t005.3) +│ └── storage.ts # 
Cookie/localStorage persistence (t005.2) +└── index.tsx # Entry point, provider composition (t005.2) + +.opencode/server/ +├── chat-api.ts # Elysia chat API routes (t005.4) +└── (existing files unchanged) +``` + +## Type Definitions + +See `.opencode/ui/chat-sidebar/types.ts` for the complete type system. Key types: + +### Message Types + +```typescript +type MessageRole = 'user' | 'assistant' | 'system' +type MessageStatus = 'pending' | 'streaming' | 'complete' | 'error' + +interface ChatMessage { + id: string + role: MessageRole + content: string + status: MessageStatus + timestamp: number + model?: string // Which model responded + tokenCount?: number // Response token count + error?: string // Error message if status === 'error' +} +``` + +### Conversation Types + +```typescript +interface Conversation { + id: string + title: string + messages: ChatMessage[] + createdAt: number + updatedAt: number + model: string // Default model for this conversation + contextSources: ContextSource[] // What context is injected +} + +interface ContextSource { + type: 'file' | 'directory' | 'memory' | 'agent' | 'custom' + path: string + label: string + enabled: boolean +} +``` + +### Sidebar State Types + +```typescript +interface SidebarState { + open: boolean + width: number // Current width in pixels + position: 'right' | 'left' +} + +interface ChatState { + conversations: Conversation[] + activeConversationId: string | null + isStreaming: boolean + streamingContent: string // Partial content during streaming +} + +interface SettingsState { + defaultModel: string // e.g., 'sonnet', 'opus', 'haiku' + contextSources: ContextSource[] + maxTokens: number + temperature: number +} +``` + +## State Management + +### Three Split Contexts + +Following the performance pattern from `tools/ui/react-context.md` — split by update frequency: + +| Context | Update Frequency | Persistence | Scope | +|---------|-----------------|-------------|-------| +| `SidebarContext` | On user interaction 
(toggle/resize) | Cookie (7 days) | Panel open/close, width | +| `ChatContext` | On every message/stream chunk | localStorage (conversations) | Messages, streaming state | +| `SettingsContext` | Rarely (user preference changes) | Cookie (30 days) | Model, temperature, context | + +### Provider Composition + +```tsx +// index.tsx — provider nesting order (outer = least frequent updates) + + + + + + + +``` + +### SidebarContext Pattern + +```tsx +// Follows react-context.md pattern exactly +const SIDEBAR_COOKIE = 'ai_chat_sidebar_state' +const WIDTH_COOKIE = 'ai_chat_sidebar_width' +const DEFAULT_WIDTH = 420 +const MIN_WIDTH = 320 +const MAX_WIDTH = 640 + +interface SidebarContextProps { + open: boolean + setOpen: (open: boolean) => void + toggleSidebar: () => void + width: number + setWidth: (width: number) => void +} + +// Hook with safe fallback for usage outside provider +function useSidebar(): SidebarContextProps { + const context = useContext(SidebarContext) + if (!context) { + return { + open: false, + setOpen: () => {}, + toggleSidebar: () => {}, + width: DEFAULT_WIDTH, + setWidth: () => {}, + } + } + return context +} +``` + +### ChatContext — Streaming Integration + +```tsx +interface ChatContextProps { + conversations: Conversation[] + activeConversation: Conversation | null + isStreaming: boolean + streamingContent: string + sendMessage: (content: string) => Promise + stopStreaming: () => void + newConversation: () => void + switchConversation: (id: string) => void + deleteConversation: (id: string) => void +} +``` + +The `sendMessage` flow: + +1. Add user message to active conversation (optimistic) +2. Create assistant message with `status: 'streaming'` +3. Open SSE connection to `/api/chat/stream` +4. Accumulate `streamingContent` on each SSE event +5. On stream end, finalize message with `status: 'complete'` +6. 
Persist conversation to localStorage + +## API Design + +### Elysia Chat Routes (`chat-api.ts`) + +```text +POST /api/chat/send — Send message, get full response (non-streaming) +POST /api/chat/stream — Send message, get SSE stream +GET /api/chat/conversations — List conversations (from server-side storage) +GET /api/chat/models — List available models + their status +POST /api/chat/context — Resolve context sources to content +``` + +### SSE Stream Format + +```text +event: start +data: {"conversationId": "abc", "model": "claude-sonnet-4-20250514"} + +event: delta +data: {"content": "Here is"} + +event: delta +data: {"content": " the answer"} + +event: done +data: {"tokenCount": 150, "model": "claude-sonnet-4-20250514"} + +event: error +data: {"message": "Rate limit exceeded", "code": "rate_limited"} +``` + +### Context Injection + +The chat API resolves `ContextSource[]` before sending to the AI provider: + +| Source Type | Resolution | +|-------------|-----------| +| `file` | Read file content (with line range support) | +| `directory` | List files + read key files | +| `memory` | Query memory-helper.sh for relevant memories | +| `agent` | Read agent markdown file | +| `custom` | User-provided text | + +Context is prepended as a system message, keeping the conversation messages clean. 
+ +## Component Specifications + +### ChatSidebar (root) + +- Renders as a fixed-position panel on the right edge +- Uses CSS `transform: translateX()` for open/close animation (compositor-only per ui-skills.md) +- Width controlled by CSS variable `--ai-sidebar-width` +- Keyboard shortcut: `Cmd+Shift+L` to toggle (matches common IDE patterns) + +### ResizeHandle + +- Vertical drag handle on the left edge of the sidebar +- Uses `pointer-events` and `user-select: none` during drag +- Clamps width between `MIN_WIDTH` (320px) and `MAX_WIDTH` (640px) +- Persists final width to cookie on `pointerup` + +### MessageList + +- Virtualized scrolling for long conversations (use native `overflow-y: auto` initially, upgrade to virtual list if performance requires) +- Auto-scrolls to bottom on new messages +- Preserves scroll position when user scrolls up (reading history) +- Shows date separators between messages from different days + +### ChatMessage + +- User messages: right-aligned, accent background +- Assistant messages: left-aligned, neutral background +- Markdown rendering for assistant messages (code blocks with syntax highlighting) +- Copy button on code blocks +- Token count display (subtle, bottom-right) + +### StreamingMessage + +- Extends ChatMessage with a blinking cursor indicator +- Content updates on each SSE delta event +- "Stop generating" button appears during streaming + +### ChatInput + +- Auto-expanding textarea (grows with content, max 6 lines) +- Send on `Enter`, newline on `Shift+Enter` +- Disabled during streaming +- Character count indicator (subtle) +- File attachment button (future — not in initial scope) + +### ToggleButton + +- Fixed position, bottom-right corner +- Only visible when sidebar is closed +- Uses `useSidebarOptional()` — renders nothing if no provider (per react-context.md pattern) +- Accessible: `aria-label="Open AI chat"` + +## Accessibility + +Per `tools/ui/ui-skills.md` requirements: + +- All interactive elements have `aria-label` 
or visible label +- Keyboard navigation: Tab through controls, Escape to close sidebar +- Focus trap when sidebar is open (optional — depends on whether it overlays content) +- `prefers-reduced-motion`: disable slide animation, use instant show/hide +- Screen reader announcements for new messages (`aria-live="polite"`) +- No `h-screen` — use `h-dvh` for mobile viewport + +## Performance Considerations + +- **Context splitting**: Three contexts prevent unnecessary re-renders (sidebar resize doesn't re-render messages) +- **Memoization**: `useCallback` for all setters, `useMemo` for derived state +- **Streaming**: SSE chunks update a single `streamingContent` string, not the full message array +- **Markdown**: Lazy-load markdown renderer (only when assistant messages exist) +- **No `will-change`** unless actively animating (per ui-skills.md) +- **No `useEffect` for render logic** (per ui-skills.md) + +## Dependencies + +New dependencies required (to be added to `package.json`): + +| Package | Purpose | Size | +|---------|---------|------| +| `react` | UI framework | ~45KB gzipped | +| `react-dom` | DOM rendering | ~40KB gzipped | +| `@types/react` | TypeScript types | dev only | +| `@types/react-dom` | TypeScript types | dev only | + +Optional (evaluate during implementation): + +| Package | Purpose | Alternative | +|---------|---------|------------| +| `marked` or `markdown-it` | Markdown rendering | Custom minimal parser | +| `highlight.js` or `shiki` | Code syntax highlighting | CSS-only basic highlighting | + +**No additional state management libraries needed** — React Context is sufficient. 
+ +## Integration with Existing Systems + +### Credential Routing + +The chat API uses the existing aidevops credential system: + +```typescript +// chat-api.ts +const apiKey = await getCredential('ANTHROPIC_API_KEY') +// Falls back to: gopass → credentials.sh → environment variable +``` + +### Model Routing + +Integrates with the existing model routing system: + +```typescript +// Resolve model tier to concrete model +const model = await resolveModel(settings.defaultModel) +// 'sonnet' → 'claude-sonnet-4-20250514' (with fallback chain) +``` + +### Memory Integration + +Chat can query cross-session memory for context: + +```typescript +// Resolve memory context source +const memories = await execCommand('memory-helper.sh', ['recall', query, '--limit', '5']) +``` + +## Testing Strategy + +| Layer | Tool | What to test | +|-------|------|-------------| +| Types | `tsc --noEmit` | Type correctness | +| Components | Bun test + React Testing Library | Render, interaction, accessibility | +| Hooks | Bun test | State transitions, streaming lifecycle | +| API | Bun test + Elysia test client | Route responses, SSE format, error handling | +| E2E | Playwright | Full sidebar flow (open, send, receive, close) | + +## Migration Path + +This architecture supports incremental delivery across t005.2-t005.4: + +1. **t005.2**: SidebarContext + ChatSidebar + ResizeHandle + ToggleButton (panel works, no chat) +2. **t005.3**: ChatContext + MessageList + ChatMessage + StreamingMessage + ChatInput (chat works with mock data) +3. **t005.4**: SettingsContext + chat-api.ts + api-client.ts (real AI responses) + +Each task produces a working increment that can be reviewed independently. 
diff --git a/.opencode/ui/chat-sidebar/constants.ts b/.opencode/ui/chat-sidebar/constants.ts
new file mode 100644
index 0000000000..0f59fd2f1b
--- /dev/null
+++ b/.opencode/ui/chat-sidebar/constants.ts
@@ -0,0 +1,172 @@
+/**
+ * AI Chat Sidebar — Configuration Constants
+ *
+ * Centralized constants for the chat sidebar feature.
+ * Adjust these to change default behavior without modifying component logic.
+ *
+ * @see .agents/tools/ui/ai-chat-sidebar.md for architecture docs
+ */
+
+import type { ModelTier, SidebarPosition } from './types'
+
+// ============================================
+// Sidebar Panel
+// ============================================
+
+/** Default sidebar width in pixels */
+export const DEFAULT_SIDEBAR_WIDTH = 420
+
+/** Minimum sidebar width (resize clamp) */
+export const MIN_SIDEBAR_WIDTH = 320
+
+/** Maximum sidebar width (resize clamp) */
+export const MAX_SIDEBAR_WIDTH = 640
+
+/** Default sidebar position */
+export const DEFAULT_SIDEBAR_POSITION: SidebarPosition = 'right'
+
+/** Cookie name for sidebar open/close state */
+export const SIDEBAR_STATE_COOKIE = 'ai_chat_sidebar_state'
+
+/** Cookie name for sidebar width */
+export const SIDEBAR_WIDTH_COOKIE = 'ai_chat_sidebar_width'
+
+/** Cookie max age: 7 days (seconds) */
+export const SIDEBAR_COOKIE_MAX_AGE = 60 * 60 * 24 * 7
+
+// ============================================
+// Chat
+// ============================================
+
+/** localStorage key for conversation data */
+export const CONVERSATIONS_STORAGE_KEY = 'ai_chat_conversations'
+
+/** Maximum conversations to keep in storage */
+export const MAX_STORED_CONVERSATIONS = 50
+
+/** Maximum messages per conversation before truncation */
+export const MAX_MESSAGES_PER_CONVERSATION = 200
+
+/** Default title for new conversations */
+export const DEFAULT_CONVERSATION_TITLE = 'New conversation'
+
+// ============================================
+// Settings
+// ============================================
+
+/** Default model tier for new conversations */
+export const DEFAULT_MODEL: ModelTier = 'sonnet'
+
+/** Default max tokens for AI responses */
+export const DEFAULT_MAX_TOKENS = 4096
+
+/** Default temperature for AI responses */
+export const DEFAULT_TEMPERATURE = 0.7
+
+/** Cookie name for settings */
+export const SETTINGS_COOKIE = 'ai_chat_settings'
+
+/** Settings cookie max age: 30 days (seconds) */
+export const SETTINGS_COOKIE_MAX_AGE = 60 * 60 * 24 * 30
+
+// ============================================
+// Input
+// ============================================
+
+/** Maximum lines before textarea stops growing */
+export const INPUT_MAX_LINES = 6
+
+/** Maximum character count for a single message */
+export const INPUT_MAX_CHARS = 32_000
+
+// ============================================
+// Streaming
+// ============================================
+
+/** SSE reconnect delay in milliseconds */
+export const SSE_RECONNECT_DELAY = 3000
+
+/** SSE connection timeout in milliseconds */
+export const SSE_TIMEOUT = 120_000
+
+// ============================================
+// API
+// ============================================
+
+/** Base path for chat API routes */
+export const CHAT_API_BASE = '/api/chat'
+
+/** Chat API endpoints */
+export const CHAT_API = {
+  send: `${CHAT_API_BASE}/send`,
+  stream: `${CHAT_API_BASE}/stream`,
+  conversations: `${CHAT_API_BASE}/conversations`,
+  models: `${CHAT_API_BASE}/models`,
+  context: `${CHAT_API_BASE}/context`,
+} as const
+
+// ============================================
+// Keyboard Shortcuts
+// ============================================
+
+/** Toggle sidebar shortcut */
+export const TOGGLE_SHORTCUT = {
+  key: 'l',
+  metaKey: true,
+  shiftKey: true,
+  label: '⌘⇧L',
+} as const
+
+/** Send message shortcut (Enter without Shift) */
+export const SEND_SHORTCUT = {
+  key: 'Enter',
+  shiftKey: false,
+  label: 'Enter',
+} as const
+
+/** New line shortcut (Shift+Enter) */
+export const NEWLINE_SHORTCUT = {
+  key: 'Enter',
+  shiftKey: true,
+  label: '⇧Enter',
+} as const
+
+// ============================================
+// Accessibility
+// ============================================
+
+/** ARIA labels */
+export const ARIA = {
+  sidebar: 'AI chat sidebar',
+  toggleButton: 'Open AI chat',
+  closeButton: 'Close AI chat',
+  sendButton: 'Send message',
+  stopButton: 'Stop generating',
+  messageList: 'Chat messages',
+  input: 'Type a message',
+  resizeHandle: 'Resize chat sidebar',
+  newChat: 'Start new conversation',
+} as const
+
+// ============================================
+// CSS Custom Properties
+// ============================================
+
+/** CSS variable names used by the sidebar */
+export const CSS_VARS = {
+  sidebarWidth: '--ai-sidebar-width',
+  sidebarTransition: '--ai-sidebar-transition',
+} as const
+
+// ============================================
+// Model Display Names
+// ============================================
+
+/** Human-readable names for model tiers */
+// NOTE(review): the generic arguments were stripped during extraction;
+// reconstructed as Record<ModelTier, string> from the key set — confirm.
+export const MODEL_DISPLAY_NAMES: Record<ModelTier, string> = {
+  haiku: 'Haiku (fast)',
+  flash: 'Flash (balanced)',
+  sonnet: 'Sonnet (default)',
+  pro: 'Pro (advanced)',
+  opus: 'Opus (best)',
+}
diff --git a/.opencode/ui/chat-sidebar/context/chat-context.tsx b/.opencode/ui/chat-sidebar/context/chat-context.tsx
new file mode 100644
index 0000000000..56b09f42de
--- /dev/null
+++ b/.opencode/ui/chat-sidebar/context/chat-context.tsx
@@ -0,0 +1,193 @@
+/**
+ * Chat Context — Conversation state and streaming
+ *
+ * Manages conversations, messages, and streaming state.
+ * Persists conversations to localStorage.
+ *
+ * Implementation task: t005.3
+ * @see .agents/tools/ui/ai-chat-sidebar.md
+ */
+
+'use client'
+
+import {
+  createContext,
+  useCallback,
+  useContext,
+  useMemo,
+  useState,
+} from 'react'
+import type { ReactNode } from 'react'
+import type {
+  ChatState,
+  Conversation,
+  UseChatReturn,
+} from '../types'
+import {
+  CONVERSATIONS_STORAGE_KEY,
+  DEFAULT_CONVERSATION_TITLE,
+  DEFAULT_MODEL,
+  MAX_STORED_CONVERSATIONS,
+} from '../constants'
+
+// ============================================
+// Context
+// ============================================
+
+// NOTE(review): the generic argument was lost in extraction; reconstructed as
+// UseChatReturn | null to match the null check in useChat — confirm.
+const ChatContext = createContext<UseChatReturn | null>(null)
+
+// ============================================
+// Hooks
+// ============================================
+
+/**
+ * Access chat state and operations.
+ * Returns safe no-op defaults when used outside provider.
+ */
+export function useChat(): UseChatReturn {
+  const context = useContext(ChatContext)
+  if (!context) {
+    return {
+      conversations: [],
+      activeConversation: null,
+      isStreaming: false,
+      streamingContent: '',
+      sendMessage: async () => {},
+      stopStreaming: () => {},
+      newConversation: () => {},
+      switchConversation: () => {},
+      deleteConversation: () => {},
+    }
+  }
+  return context
+}
+
+// ============================================
+// Storage Helpers
+// ============================================
+
+/**
+ * Load persisted conversations from localStorage.
+ * Returns [] when storage is empty, unavailable, or holds corrupted data.
+ */
+function loadConversations(): Conversation[] {
+  try {
+    const stored = localStorage.getItem(CONVERSATIONS_STORAGE_KEY)
+    if (!stored) return []
+    // Validate the parsed shape: a corrupted non-array payload previously
+    // passed straight through and would crash later in `.find(...)`.
+    const parsed: unknown = JSON.parse(stored)
+    return Array.isArray(parsed) ? (parsed as Conversation[]) : []
+  } catch {
+    return []
+  }
+}
+
+/**
+ * Persist conversations, keeping only the most recent
+ * MAX_STORED_CONVERSATIONS entries (list is newest-first).
+ */
+function saveConversations(conversations: Conversation[]): void {
+  try {
+    const trimmed = conversations.slice(0, MAX_STORED_CONVERSATIONS)
+    localStorage.setItem(CONVERSATIONS_STORAGE_KEY, JSON.stringify(trimmed))
+  } catch {
+    // localStorage may be full or unavailable — fail silently
+  }
+}
+
+/** Generate a unique conversation id. */
+function generateId(): string {
+  return crypto.randomUUID()
+}
+
+// ============================================
+// Provider
+// ============================================
+
+interface ChatProviderProps {
+  readonly children: ReactNode
+}
+
+export function ChatProvider({ children }: ChatProviderProps) {
+  const [conversations, setConversations] =
+    useState<Conversation[]>(loadConversations)
+  const [activeConversationId, setActiveConversationId] = useState<
+    string | null
+  >(() => conversations[0]?.id ?? null)
+  const [isStreaming, setIsStreaming] = useState(false)
+  const [streamingContent, setStreamingContent] = useState('')
+
+  // Derived state
+  const activeConversation = useMemo(
+    () => conversations.find((c) => c.id === activeConversationId) ?? null,
+    [conversations, activeConversationId],
+  )
+
+  /** Create an empty conversation and make it active. */
+  const newConversation = useCallback(() => {
+    const conversation: Conversation = {
+      id: generateId(),
+      title: DEFAULT_CONVERSATION_TITLE,
+      messages: [],
+      createdAt: Date.now(),
+      updatedAt: Date.now(),
+      model: DEFAULT_MODEL,
+      contextSources: [],
+    }
+    // Compute the next list outside the state updater: persisting inside a
+    // functional updater is a side effect React may invoke twice in
+    // StrictMode (harmless here, but impure).
+    const updated = [conversation, ...conversations]
+    saveConversations(updated)
+    setConversations(updated)
+    setActiveConversationId(conversation.id)
+  }, [conversations])
+
+  const switchConversation = useCallback((id: string) => {
+    setActiveConversationId(id)
+  }, [])
+
+  /**
+   * Delete a conversation. If the deleted conversation was active, fall
+   * back to the first remaining one (previously the active id was always
+   * reset to null, leaving no selection even when others remained).
+   */
+  const deleteConversation = useCallback(
+    (id: string) => {
+      const updated = conversations.filter((c) => c.id !== id)
+      saveConversations(updated)
+      setConversations(updated)
+      setActiveConversationId((prevId) =>
+        prevId === id ? updated[0]?.id ?? null : prevId,
+      )
+    },
+    [conversations],
+  )
+
+  const sendMessage = useCallback(async (content: string) => {
+    // Stub — full implementation in t005.3 with streaming hook
+    // This will:
+    // 1. Add user message to active conversation
+    // 2. Create assistant message with status: 'streaming'
+    // 3. Open SSE connection
+    // 4. Accumulate streamingContent
+    // 5. Finalize on stream end
+    void content
+  }, [])
+
+  const stopStreaming = useCallback(() => {
+    // Stub — full implementation in t005.3
+    setIsStreaming(false)
+    setStreamingContent('')
+  }, [])
+
+  const contextValue = useMemo(
+    () => ({
+      conversations,
+      activeConversation,
+      isStreaming,
+      streamingContent,
+      sendMessage,
+      stopStreaming,
+      newConversation,
+      switchConversation,
+      deleteConversation,
+    }),
+    [
+      conversations,
+      activeConversation,
+      isStreaming,
+      streamingContent,
+      sendMessage,
+      stopStreaming,
+      newConversation,
+      switchConversation,
+      deleteConversation,
+    ],
+  )
+
+  // NOTE(review): JSX tags were stripped during extraction; reconstructed as
+  // a plain Provider wrapper — confirm against the original file.
+  return (
+    <ChatContext.Provider value={contextValue}>
+      {children}
+    </ChatContext.Provider>
+  )
+}
diff --git a/.opencode/ui/chat-sidebar/context/settings-context.tsx b/.opencode/ui/chat-sidebar/context/settings-context.tsx
new file mode 100644
index 0000000000..f1a3b05367
--- /dev/null
+++ b/.opencode/ui/chat-sidebar/context/settings-context.tsx
@@ -0,0 +1,209 @@
+/**
+ * Settings Context — Model selection and AI configuration
+ *
+ * Manages user preferences for AI interactions.
+ * Persists to cookies (long-lived, rarely changes).
+ *
+ * Implementation task: t005.4
+ * @see .agents/tools/ui/ai-chat-sidebar.md
+ */
+
+'use client'
+
+import {
+  createContext,
+  useCallback,
+  useContext,
+  useMemo,
+  useState,
+} from 'react'
+import type { ReactNode } from 'react'
+import type {
+  ContextSource,
+  ModelTier,
+  SettingsState,
+} from '../types'
+import {
+  DEFAULT_MAX_TOKENS,
+  DEFAULT_MODEL,
+  DEFAULT_TEMPERATURE,
+  SETTINGS_COOKIE,
+  SETTINGS_COOKIE_MAX_AGE,
+} from '../constants'
+
+// ============================================
+// Context Interface
+// ============================================
+
+interface SettingsContextProps {
+  settings: SettingsState
+  setDefaultModel: (model: ModelTier) => void
+  setMaxTokens: (tokens: number) => void
+  setTemperature: (temp: number) => void
+  addContextSource: (source: ContextSource) => void
+  removeContextSource: (path: string) => void
+  toggleContextSource: (path: string) => void
+}
+
+// ============================================
+// Context
+// ============================================
+
+// NOTE(review): generic argument lost in extraction; reconstructed to match
+// the null check in useSettings — confirm.
+const SettingsContext = createContext<SettingsContextProps | null>(null)
+
+// ============================================
+// Hooks
+// ============================================
+
+/**
+ * Access settings state and operations.
+ * Returns safe defaults when used outside provider.
+ */
+export function useSettings(): SettingsContextProps {
+  const context = useContext(SettingsContext)
+  if (!context) {
+    return {
+      settings: {
+        defaultModel: DEFAULT_MODEL,
+        contextSources: [],
+        maxTokens: DEFAULT_MAX_TOKENS,
+        temperature: DEFAULT_TEMPERATURE,
+      },
+      setDefaultModel: () => {},
+      setMaxTokens: () => {},
+      setTemperature: () => {},
+      addContextSource: () => {},
+      removeContextSource: () => {},
+      toggleContextSource: () => {},
+    }
+  }
+  return context
+}
+
+// ============================================
+// Cookie Helpers
+// ============================================
+
+/** Write settings to a long-lived cookie (JSON, URI-encoded). */
+function persistSettings(settings: SettingsState): void {
+  try {
+    const value = encodeURIComponent(JSON.stringify(settings))
+    document.cookie = `${SETTINGS_COOKIE}=${value}; path=/; max-age=${SETTINGS_COOKIE_MAX_AGE}`
+  } catch {
+    // Cookie may be too large — fail silently
+  }
+}
+
+/**
+ * Read settings back from the cookie.
+ * Returns a *partial* state so callers can merge over defaults; a cookie
+ * written by an older build may be missing newer fields.
+ */
+function loadSettings(): Partial<SettingsState> | null {
+  try {
+    const match = document.cookie.match(
+      new RegExp(`(?:^|; )${SETTINGS_COOKIE}=([^;]*)`),
+    )
+    // Guard the capture group explicitly (noUncheckedIndexedAccess-safe).
+    const raw = match?.[1]
+    if (!raw) return null
+    const parsed: unknown = JSON.parse(decodeURIComponent(raw))
+    // Reject corrupted non-object payloads instead of passing them through.
+    if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
+      return null
+    }
+    return parsed as Partial<SettingsState>
+  } catch {
+    return null
+  }
+}
+
+// ============================================
+// Provider
+// ============================================
+
+interface SettingsProviderProps {
+  readonly children: ReactNode
+  readonly defaultModel?: ModelTier
+}
+
+export function SettingsProvider({
+  children,
+  defaultModel = DEFAULT_MODEL,
+}: SettingsProviderProps) {
+  const [settings, setSettings] = useState<SettingsState>(() => {
+    const fallback: SettingsState = {
+      defaultModel,
+      contextSources: [],
+      maxTokens: DEFAULT_MAX_TOKENS,
+      temperature: DEFAULT_TEMPERATURE,
+    }
+    const stored = loadSettings()
+    // Merge over defaults: previously `stored ?? fallback` used the cookie
+    // wholesale, so a stale cookie missing a newly added SettingsState field
+    // produced an incomplete state object.
+    return stored ? { ...fallback, ...stored } : fallback
+  })
+
+  /** Apply an update and persist the result in one step. */
+  const updateSettings = useCallback(
+    (updater: (prev: SettingsState) => SettingsState) => {
+      setSettings((prev) => {
+        const next = updater(prev)
+        persistSettings(next)
+        return next
+      })
+    },
+    [],
+  )
+
+  const setDefaultModel = useCallback(
+    (model: ModelTier) => updateSettings((s) => ({ ...s, defaultModel: model })),
+    [updateSettings],
+  )
+
+  const setMaxTokens = useCallback(
+    // Clamp to at least 1 — a zero/negative budget is never a valid request.
+    (tokens: number) => updateSettings((s) => ({ ...s, maxTokens: Math.max(1, tokens) })),
+    [updateSettings],
+  )
+
+  const setTemperature = useCallback(
+    // Clamped to [0, 1] as in the original.
+    (temp: number) =>
+      updateSettings((s) => ({ ...s, temperature: Math.min(1, Math.max(0, temp)) })),
+    [updateSettings],
+  )
+
+  const addContextSource = useCallback(
+    // Replace any existing source with the same path: remove/toggle are
+    // keyed by path, so duplicates would be acted on en masse.
+    (source: ContextSource) =>
+      updateSettings((s) => ({
+        ...s,
+        contextSources: [
+          ...s.contextSources.filter((cs) => cs.path !== source.path),
+          source,
+        ],
+      })),
+    [updateSettings],
+  )
+
+  const removeContextSource = useCallback(
+    (path: string) =>
+      updateSettings((s) => ({
+        ...s,
+        contextSources: s.contextSources.filter((cs) => cs.path !== path),
+      })),
+    [updateSettings],
+  )
+
+  const toggleContextSource = useCallback(
+    (path: string) =>
+      updateSettings((s) => ({
+        ...s,
+        contextSources: s.contextSources.map((cs) =>
+          cs.path === path ? { ...cs, enabled: !cs.enabled } : cs,
+        ),
+      })),
+    [updateSettings],
+  )
+
+  const contextValue = useMemo(
+    () => ({
+      settings,
+      setDefaultModel,
+      setMaxTokens,
+      setTemperature,
+      addContextSource,
+      removeContextSource,
+      toggleContextSource,
+    }),
+    [
+      settings,
+      setDefaultModel,
+      setMaxTokens,
+      setTemperature,
+      addContextSource,
+      removeContextSource,
+      toggleContextSource,
+    ],
+  )
+
+  // NOTE(review): JSX tags were stripped during extraction; reconstructed as
+  // a plain Provider wrapper — confirm against the original file.
+  return (
+    <SettingsContext.Provider value={contextValue}>
+      {children}
+    </SettingsContext.Provider>
+  )
+}
diff --git a/.opencode/ui/chat-sidebar/context/sidebar-context.tsx b/.opencode/ui/chat-sidebar/context/sidebar-context.tsx
new file mode 100644
index 0000000000..95089df1a4
--- /dev/null
+++ b/.opencode/ui/chat-sidebar/context/sidebar-context.tsx
@@ -0,0 +1,140 @@
+/**
+ * Sidebar Context — Panel open/close and width state
+ *
+ * Manages the physical sidebar panel state with cookie persistence.
+ * Follows the pattern from .agents/tools/ui/react-context.md exactly.
+ *
+ * Implementation task: t005.2
+ * @see .agents/tools/ui/ai-chat-sidebar.md
+ */
+
+'use client'
+
+import {
+  createContext,
+  useCallback,
+  useContext,
+  useMemo,
+  useState,
+} from 'react'
+import type { ReactNode } from 'react'
+import type { SidebarState } from '../types'
+import {
+  DEFAULT_SIDEBAR_WIDTH,
+  MAX_SIDEBAR_WIDTH,
+  MIN_SIDEBAR_WIDTH,
+  SIDEBAR_COOKIE_MAX_AGE,
+  SIDEBAR_STATE_COOKIE,
+  SIDEBAR_WIDTH_COOKIE,
+  CSS_VARS,
+} from '../constants'
+
+// ============================================
+// Context Interface
+// ============================================
+
+interface SidebarContextProps {
+  open: boolean
+  setOpen: (open: boolean) => void
+  toggleSidebar: () => void
+  width: number
+  setWidth: (width: number) => void
+}
+
+// ============================================
+// Context
+// ============================================
+
+// NOTE(review): generic argument lost in extraction; reconstructed to match
+// the null check in useSidebar — confirm.
+const SidebarContext = createContext<SidebarContextProps | null>(null)
+
+// ============================================
+// Hooks
+// ============================================
+
+/**
+ * Access sidebar state.
Returns safe defaults when used outside provider. + */ +export function useSidebar(): SidebarContextProps { + const context = useContext(SidebarContext) + if (!context) { + return { + open: false, + setOpen: () => {}, + toggleSidebar: () => {}, + width: DEFAULT_SIDEBAR_WIDTH, + setWidth: () => {}, + } + } + return context +} + +/** + * Access sidebar state, returning null when outside provider. + * Use for conditional rendering (e.g., ToggleButton that hides when no provider). + */ +export function useSidebarOptional(): SidebarContextProps | null { + return useContext(SidebarContext) +} + +// ============================================ +// Cookie Helpers +// ============================================ + +function setCookie(name: string, value: string, maxAge: number): void { + document.cookie = `${name}=${value}; path=/; max-age=${maxAge}` +} + +function getCookie(name: string): string | null { + const match = document.cookie.match(new RegExp(`(?:^|; )${name}=([^;]*)`)) + return match ? 
decodeURIComponent(match[1]) : null +} + +// ============================================ +// Provider +// ============================================ + +interface SidebarProviderProps { + readonly children: ReactNode + readonly defaultOpen?: boolean + readonly defaultWidth?: number +} + +export function SidebarProvider({ + children, + defaultOpen = false, + defaultWidth = DEFAULT_SIDEBAR_WIDTH, +}: SidebarProviderProps) { + const [open, setOpenState] = useState(defaultOpen) + const [width, setWidthState] = useState(defaultWidth) + + const setOpen = useCallback((value: boolean) => { + setOpenState(value) + setCookie(SIDEBAR_STATE_COOKIE, String(value), SIDEBAR_COOKIE_MAX_AGE) + }, []) + + const toggleSidebar = useCallback(() => { + setOpenState((prev) => { + const newValue = !prev + setCookie(SIDEBAR_STATE_COOKIE, String(newValue), SIDEBAR_COOKIE_MAX_AGE) + return newValue + }) + }, []) + + const setWidth = useCallback((value: number) => { + const clamped = Math.min(Math.max(value, MIN_SIDEBAR_WIDTH), MAX_SIDEBAR_WIDTH) + setWidthState(clamped) + setCookie(SIDEBAR_WIDTH_COOKIE, String(clamped), SIDEBAR_COOKIE_MAX_AGE) + }, []) + + const contextValue = useMemo( + () => ({ open, setOpen, toggleSidebar, width, setWidth }), + [open, setOpen, toggleSidebar, width, setWidth], + ) + + return ( + + + {children} + + ) +} diff --git a/.opencode/ui/chat-sidebar/hooks/use-chat.ts b/.opencode/ui/chat-sidebar/hooks/use-chat.ts new file mode 100644 index 0000000000..5fa452b18c --- /dev/null +++ b/.opencode/ui/chat-sidebar/hooks/use-chat.ts @@ -0,0 +1,89 @@ +/** + * useChat — Orchestration hook combining chat context with streaming + * + * Bridges ChatContext state with the useStreaming hook. + * Handles the full send → stream → finalize lifecycle. 
+ * + * Implementation task: t005.3 / t005.4 + * @see .agents/tools/ui/ai-chat-sidebar.md "sendMessage flow" + */ + +import { useCallback, useEffect, useRef } from 'react' +import type { ChatMessage, ChatRequest } from '../types' +import { useChat as useChatContext } from '../context/chat-context' +import { useSettings } from '../context/settings-context' +import { useStreaming } from './use-streaming' + +/** + * Orchestration hook that combines chat context with streaming. + * + * This hook is the primary interface for components that need to + * send messages and display streaming responses. + * + * Usage: + * const chat = useChatOrchestrator() + * await chat.send('Hello, AI!') + */ +export function useChatOrchestrator() { + const chatContext = useChatContext() + const { settings } = useSettings() + const streaming = useStreaming() + const streamingMessageIdRef = useRef(null) + + // When streaming content updates, sync to chat context + // (Full implementation in t005.3 — this is the integration pattern) + useEffect(() => { + if (streaming.isStreaming && streaming.content) { + // Update the streaming message content in the active conversation + // This will be implemented when ChatContext gets message mutation methods + } + }, [streaming.isStreaming, streaming.content]) + + // When streaming completes, finalize the message + useEffect(() => { + if (!streaming.isStreaming && streamingMessageIdRef.current && streaming.content) { + // Finalize: update message status to 'complete', set final content + // This will be implemented when ChatContext gets message mutation methods + streamingMessageIdRef.current = null + } + }, [streaming.isStreaming, streaming.content]) + + const send = useCallback( + async (content: string) => { + const conversation = chatContext.activeConversation + if (!conversation) { + // Auto-create a conversation if none exists + chatContext.newConversation() + // Note: need to wait for state update — full implementation in t005.3 + return + } + + 
// Build the request + const request: ChatRequest = { + conversationId: conversation.id, + message: content, + model: settings.defaultModel, + contextSources: settings.contextSources.filter((s) => s.enabled), + maxTokens: settings.maxTokens, + temperature: settings.temperature, + } + + // Generate a message ID for the streaming response + streamingMessageIdRef.current = crypto.randomUUID() + + // Start streaming + streaming.startStream(request) + }, + [chatContext, settings, streaming], + ) + + return { + ...chatContext, + send, + isStreaming: streaming.isStreaming, + streamingContent: streaming.content, + streamingError: streaming.error, + streamingModel: streaming.model, + stopStreaming: streaming.stopStream, + } +} diff --git a/.opencode/ui/chat-sidebar/hooks/use-resize.ts b/.opencode/ui/chat-sidebar/hooks/use-resize.ts new file mode 100644 index 0000000000..5395b561a9 --- /dev/null +++ b/.opencode/ui/chat-sidebar/hooks/use-resize.ts @@ -0,0 +1,119 @@ +/** + * useResize — Drag-to-resize hook for sidebar width + * + * Handles pointer events for resizing the sidebar panel. + * Clamps width between MIN and MAX, persists on pointer up. + * + * Implementation task: t005.2 + * @see .agents/tools/ui/ai-chat-sidebar.md "ResizeHandle" + */ + +import { useCallback, useRef, useState } from 'react' +import type { UseResizeReturn } from '../types' +import { + DEFAULT_SIDEBAR_WIDTH, + MAX_SIDEBAR_WIDTH, + MIN_SIDEBAR_WIDTH, +} from '../constants' + +interface UseResizeOptions { + /** Initial width */ + initialWidth?: number + /** Callback when width changes (for persisting) */ + onWidthChange?: (width: number) => void + /** Which side the sidebar is on (affects drag direction) */ + position?: 'left' | 'right' +} + +/** + * Hook for drag-to-resize behavior on the sidebar. 
+ * + * Usage: + * const { width, isDragging, handleProps } = useResize({ + * initialWidth: 420, + * onWidthChange: setWidth, + * }) + * + */ +export function useResize({ + initialWidth = DEFAULT_SIDEBAR_WIDTH, + onWidthChange, + position = 'right', +}: UseResizeOptions = {}): UseResizeReturn { + const [width, setWidth] = useState(initialWidth) + const [isDragging, setIsDragging] = useState(false) + const startXRef = useRef(0) + const startWidthRef = useRef(initialWidth) + + const handlePointerMove = useCallback( + (e: PointerEvent) => { + const delta = position === 'right' + ? startXRef.current - e.clientX // Dragging left increases width + : e.clientX - startXRef.current // Dragging right increases width + + const newWidth = Math.min( + Math.max(startWidthRef.current + delta, MIN_SIDEBAR_WIDTH), + MAX_SIDEBAR_WIDTH, + ) + + setWidth(newWidth) + }, + [position], + ) + + const handlePointerUp = useCallback( + (e: PointerEvent) => { + setIsDragging(false) + document.body.style.userSelect = '' + document.body.style.cursor = '' + + // Calculate final width + const delta = position === 'right' + ? 
startXRef.current - e.clientX + : e.clientX - startXRef.current + + const finalWidth = Math.min( + Math.max(startWidthRef.current + delta, MIN_SIDEBAR_WIDTH), + MAX_SIDEBAR_WIDTH, + ) + + // Persist the final width + onWidthChange?.(finalWidth) + + // Clean up global listeners + document.removeEventListener('pointermove', handlePointerMove) + document.removeEventListener('pointerup', handlePointerUp) + }, + [position, onWidthChange, handlePointerMove], + ) + + const handlePointerDown = useCallback( + (e: React.PointerEvent) => { + e.preventDefault() + setIsDragging(true) + startXRef.current = e.clientX + startWidthRef.current = width + + // Prevent text selection during drag + document.body.style.userSelect = 'none' + document.body.style.cursor = 'col-resize' + + // Attach global listeners for drag tracking + document.addEventListener('pointermove', handlePointerMove) + document.addEventListener('pointerup', handlePointerUp) + }, + [width, handlePointerMove, handlePointerUp], + ) + + return { + width, + isDragging, + handleProps: { + onPointerDown: handlePointerDown, + style: { + cursor: 'col-resize', + touchAction: 'none', + }, + }, + } +} diff --git a/.opencode/ui/chat-sidebar/hooks/use-streaming.ts b/.opencode/ui/chat-sidebar/hooks/use-streaming.ts new file mode 100644 index 0000000000..afe1ac6511 --- /dev/null +++ b/.opencode/ui/chat-sidebar/hooks/use-streaming.ts @@ -0,0 +1,154 @@ +/** + * useStreaming — SSE streaming hook for AI responses + * + * Manages a Server-Sent Events connection to the chat API. + * Accumulates delta events into a content string. + * Handles start, delta, done, and error events. 
+ * + * Implementation task: t005.3 + * @see .agents/tools/ui/ai-chat-sidebar.md "SSE Stream Format" + */ + +import { useCallback, useRef, useState } from 'react' +import type { + ChatRequest, + StreamEvent, + UseStreamingReturn, +} from '../types' +import { CHAT_API, SSE_TIMEOUT } from '../constants' + +/** + * Hook for managing SSE streaming from the chat API. + * + * Usage: + * const { isStreaming, content, startStream, stopStream, error } = useStreaming() + * startStream({ conversationId, message, model, ... }) + */ +export function useStreaming(): UseStreamingReturn { + const [isStreaming, setIsStreaming] = useState(false) + const [content, setContent] = useState('') + const [error, setError] = useState(null) + const [model, setModel] = useState(null) + const [tokenCount, setTokenCount] = useState(null) + + // AbortController ref for cancellation + const abortRef = useRef(null) + + const stopStream = useCallback(() => { + if (abortRef.current) { + abortRef.current.abort() + abortRef.current = null + } + setIsStreaming(false) + }, []) + + const startStream = useCallback((request: ChatRequest) => { + // Abort any existing stream + if (abortRef.current) { + abortRef.current.abort() + } + + // Reset state + setContent('') + setError(null) + setModel(null) + setTokenCount(null) + setIsStreaming(true) + + const controller = new AbortController() + abortRef.current = controller + + // Timeout safety + const timeout = setTimeout(() => { + controller.abort() + setError('Stream timed out') + setIsStreaming(false) + }, SSE_TIMEOUT) + + // Start SSE connection via fetch (EventSource doesn't support POST) + fetch(CHAT_API.stream, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(request), + signal: controller.signal, + }) + .then(async (response) => { + if (!response.ok) { + throw new Error(`Stream request failed: ${response.status}`) + } + + const reader = response.body?.getReader() + if (!reader) { + throw new Error('No 
response body') + } + + const decoder = new TextDecoder() + let buffer = '' + + while (true) { + const { done, value } = await reader.read() + if (done) break + + buffer += decoder.decode(value, { stream: true }) + + // Parse SSE events from buffer + const lines = buffer.split('\n') + buffer = lines.pop() ?? '' // Keep incomplete line in buffer + + let eventType = '' + for (const line of lines) { + if (line.startsWith('event: ')) { + eventType = line.slice(7).trim() + } else if (line.startsWith('data: ')) { + const data = line.slice(6) + try { + const event = JSON.parse(data) as StreamEvent + handleEvent({ ...event, type: eventType as StreamEvent['type'] }) + } catch { + // Malformed JSON — skip + } + } + } + } + }) + .catch((err: Error) => { + if (err.name !== 'AbortError') { + setError(err.message) + } + }) + .finally(() => { + clearTimeout(timeout) + setIsStreaming(false) + abortRef.current = null + }) + + function handleEvent(event: StreamEvent): void { + switch (event.type) { + case 'start': + setModel(event.model) + break + case 'delta': + setContent((prev) => prev + event.content) + break + case 'done': + setTokenCount(event.tokenCount) + setModel(event.model) + break + case 'error': + setError(event.message) + stopStream() + break + } + } + }, [stopStream]) + + return { + isStreaming, + content, + startStream, + stopStream, + error, + model, + tokenCount, + } +} diff --git a/.opencode/ui/chat-sidebar/index.tsx b/.opencode/ui/chat-sidebar/index.tsx new file mode 100644 index 0000000000..ac85d7cef2 --- /dev/null +++ b/.opencode/ui/chat-sidebar/index.tsx @@ -0,0 +1,68 @@ +/** + * AI Chat Sidebar — Entry Point + * + * Composes providers and renders the root sidebar component. + * Provider nesting order: outer = least frequent updates. 
+ * + * Usage: + * import { AIChatSidebar } from '.opencode/ui/chat-sidebar' + * + * + * @see .agents/tools/ui/ai-chat-sidebar.md for architecture docs + */ + +'use client' + +import type { ModelTier } from './types' +import { SettingsProvider } from './context/settings-context' +import { SidebarProvider } from './context/sidebar-context' +import { ChatProvider } from './context/chat-context' +import { DEFAULT_SIDEBAR_WIDTH } from './constants' + +// ============================================ +// Root Component +// ============================================ + +interface AIChatSidebarProps { + /** Initial open state (can be read from cookie on server) */ + readonly defaultOpen?: boolean + /** Initial sidebar width in pixels */ + readonly defaultWidth?: number + /** Default model tier for new conversations */ + readonly defaultModel?: ModelTier +} + +/** + * Root AI Chat Sidebar component. + * Wraps all providers and renders the sidebar UI. + * + * Implementation of ChatSidebar component is in t005.2. + * This file provides the provider composition scaffold. 
+ */ +export function AIChatSidebar({ + defaultOpen = false, + defaultWidth = DEFAULT_SIDEBAR_WIDTH, + defaultModel = 'sonnet', +}: AIChatSidebarProps) { + return ( + + + + {/* ChatSidebar component — implemented in t005.2 */} + + {/* Placeholder: replace with in t005.2 */} + + + + + ) +} + +// ============================================ +// Re-exports for consumer convenience +// ============================================ + +export type { ModelTier, ChatMessage, Conversation, ContextSource } from './types' +export { useSidebar, useSidebarOptional } from './context/sidebar-context' +export { useChat } from './context/chat-context' +export { useSettings } from './context/settings-context' diff --git a/.opencode/ui/chat-sidebar/lib/api-client.ts b/.opencode/ui/chat-sidebar/lib/api-client.ts new file mode 100644 index 0000000000..a65b4acd10 --- /dev/null +++ b/.opencode/ui/chat-sidebar/lib/api-client.ts @@ -0,0 +1,83 @@ +/** + * Chat API Client — HTTP client for the Elysia chat backend + * + * Provides typed methods for all chat API endpoints. + * Streaming is handled by useStreaming hook directly (SSE via fetch). + * + * Implementation task: t005.4 + * @see .agents/tools/ui/ai-chat-sidebar.md "API Design" + */ + +import type { + ChatRequest, + ChatResponse, + Conversation, + ContextSource, + ModelInfo, + ResolvedContext, +} from '../types' +import { CHAT_API } from '../constants' + +/** + * Send a message and get a complete (non-streaming) response. + * Use this for simple requests where streaming is not needed. 
+ */ +export async function sendMessage(request: ChatRequest): Promise { + const response = await fetch(CHAT_API.send, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(request), + }) + + if (!response.ok) { + const error = await response.json().catch(() => ({ message: 'Unknown error' })) + throw new Error((error as { message: string }).message || `Request failed: ${response.status}`) + } + + return response.json() as Promise +} + +/** + * List all conversations stored on the server. + * Falls back to empty array on error. + */ +export async function listConversations(): Promise { + try { + const response = await fetch(CHAT_API.conversations) + if (!response.ok) return [] + return (await response.json()) as Conversation[] + } catch { + return [] + } +} + +/** + * List available models and their current status. + */ +export async function listModels(): Promise { + const response = await fetch(CHAT_API.models) + if (!response.ok) { + throw new Error(`Failed to fetch models: ${response.status}`) + } + return (await response.json()) as ModelInfo[] +} + +/** + * Resolve context sources to their content. + * Used to preview what context will be injected into a conversation. + */ +export async function resolveContext( + sources: ContextSource[], +): Promise { + const response = await fetch(CHAT_API.context, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ sources }), + }) + + if (!response.ok) { + throw new Error(`Failed to resolve context: ${response.status}`) + } + + return (await response.json()) as ResolvedContext[] +} diff --git a/.opencode/ui/chat-sidebar/lib/storage.ts b/.opencode/ui/chat-sidebar/lib/storage.ts new file mode 100644 index 0000000000..3634d367dd --- /dev/null +++ b/.opencode/ui/chat-sidebar/lib/storage.ts @@ -0,0 +1,100 @@ +/** + * Storage utilities — Cookie and localStorage helpers + * + * Centralized persistence layer for the chat sidebar. 
+ * Handles serialization, size limits, and error recovery. + * + * Implementation task: t005.2 + * @see .agents/tools/ui/ai-chat-sidebar.md + */ + +// ============================================ +// Cookie Helpers +// ============================================ + +/** + * Set a cookie with the given name, value, and max age. + */ +export function setCookie(name: string, value: string, maxAge: number): void { + document.cookie = `${name}=${encodeURIComponent(value)}; path=/; max-age=${maxAge}` +} + +/** + * Get a cookie value by name. Returns null if not found. + */ +export function getCookie(name: string): string | null { + const match = document.cookie.match( + new RegExp(`(?:^|; )${name}=([^;]*)`), + ) + return match ? decodeURIComponent(match[1]) : null +} + +/** + * Delete a cookie by setting max-age to 0. + */ +export function deleteCookie(name: string): void { + document.cookie = `${name}=; path=/; max-age=0` +} + +// ============================================ +// localStorage Helpers +// ============================================ + +/** + * Get a parsed JSON value from localStorage. + * Returns the fallback value on any error. + */ +export function getStorageItem(key: string, fallback: T): T { + try { + const stored = localStorage.getItem(key) + if (stored === null) return fallback + return JSON.parse(stored) as T + } catch { + return fallback + } +} + +/** + * Set a JSON value in localStorage. + * Fails silently if storage is full or unavailable. + */ +export function setStorageItem(key: string, value: T): void { + try { + localStorage.setItem(key, JSON.stringify(value)) + } catch { + // Storage full or unavailable — fail silently + } +} + +/** + * Remove an item from localStorage. 
+ */ +export function removeStorageItem(key: string): void { + try { + localStorage.removeItem(key) + } catch { + // Fail silently + } +} + +// ============================================ +// Serialization Helpers +// ============================================ + +/** + * Estimate the byte size of a JSON-serializable value. + * Used to check if data will fit in cookies (~4KB) or localStorage (~5MB). + */ +export function estimateByteSize(value: unknown): number { + try { + return new Blob([JSON.stringify(value)]).size + } catch { + return 0 + } +} + +/** Maximum cookie value size (conservative, accounting for name + metadata) */ +export const MAX_COOKIE_BYTES = 3800 + +/** Maximum localStorage value size per key (conservative) */ +export const MAX_STORAGE_BYTES = 4_500_000 diff --git a/.opencode/ui/chat-sidebar/types.ts b/.opencode/ui/chat-sidebar/types.ts new file mode 100644 index 0000000000..1af666de87 --- /dev/null +++ b/.opencode/ui/chat-sidebar/types.ts @@ -0,0 +1,257 @@ +/** + * AI Chat Sidebar — Shared Type Definitions + * + * Central type system for the chat sidebar feature. + * Used by contexts, components, hooks, and the API layer. 
+ * + * @see .agents/tools/ui/ai-chat-sidebar.md for architecture docs + */ + +// ============================================ +// Message Types +// ============================================ + +export type MessageRole = 'user' | 'assistant' | 'system' + +export type MessageStatus = 'pending' | 'streaming' | 'complete' | 'error' + +export interface ChatMessage { + /** Unique message identifier (nanoid or crypto.randomUUID) */ + id: string + /** Who sent this message */ + role: MessageRole + /** Message content (plain text for user, markdown for assistant) */ + content: string + /** Current lifecycle status */ + status: MessageStatus + /** Unix timestamp (ms) when message was created */ + timestamp: number + /** Model that generated this response (assistant messages only) */ + model?: string + /** Token count for the response (assistant messages only) */ + tokenCount?: number + /** Error details if status === 'error' */ + error?: string +} + +// ============================================ +// Conversation Types +// ============================================ + +export interface Conversation { + /** Unique conversation identifier */ + id: string + /** Display title (auto-generated from first message or user-set) */ + title: string + /** Ordered list of messages in this conversation */ + messages: ChatMessage[] + /** Unix timestamp (ms) when conversation was created */ + createdAt: number + /** Unix timestamp (ms) of last activity */ + updatedAt: number + /** Default model tier for this conversation */ + model: string + /** Context sources injected into this conversation */ + contextSources: ContextSource[] +} + +export interface ContextSource { + /** Type of context to resolve */ + type: 'file' | 'directory' | 'memory' | 'agent' | 'custom' + /** Path or identifier for the source */ + path: string + /** Human-readable label for display */ + label: string + /** Whether this source is currently active */ + enabled: boolean +} + +// 
============================================ +// Sidebar State Types +// ============================================ + +export type SidebarPosition = 'right' | 'left' + +export interface SidebarState { + /** Whether the sidebar panel is open */ + open: boolean + /** Current width in pixels */ + width: number + /** Which side of the viewport */ + position: SidebarPosition +} + +// ============================================ +// Chat State Types +// ============================================ + +export interface ChatState { + /** All conversations */ + conversations: Conversation[] + /** Currently active conversation ID */ + activeConversationId: string | null + /** Whether a response is currently streaming */ + isStreaming: boolean + /** Partial content accumulated during streaming */ + streamingContent: string +} + +// ============================================ +// Settings Types +// ============================================ + +/** Model tier identifiers matching aidevops model routing */ +export type ModelTier = 'haiku' | 'flash' | 'sonnet' | 'pro' | 'opus' + +export interface SettingsState { + /** Default model tier for new conversations */ + defaultModel: ModelTier + /** Default context sources for new conversations */ + contextSources: ContextSource[] + /** Maximum tokens for AI responses */ + maxTokens: number + /** Temperature for AI responses (0-1) */ + temperature: number +} + +// ============================================ +// API Types +// ============================================ + +/** Request body for POST /api/chat/send and /api/chat/stream */ +export interface ChatRequest { + /** Conversation ID (creates new if not found) */ + conversationId: string + /** The user's message content */ + message: string + /** Model tier to use for this request */ + model: ModelTier + /** Context sources to resolve and inject */ + contextSources: ContextSource[] + /** Max tokens for the response */ + maxTokens: number + /** Temperature (0-1) */ + 
temperature: number +} + +/** Response body for POST /api/chat/send (non-streaming) */ +export interface ChatResponse { + /** The assistant's response content */ + content: string + /** Concrete model used (e.g., 'claude-sonnet-4-20250514') */ + model: string + /** Token count for the response */ + tokenCount: number + /** Conversation ID */ + conversationId: string +} + +/** SSE event types for POST /api/chat/stream */ +export type StreamEventType = 'start' | 'delta' | 'done' | 'error' + +export interface StreamStartEvent { + type: 'start' + conversationId: string + model: string +} + +export interface StreamDeltaEvent { + type: 'delta' + content: string +} + +export interface StreamDoneEvent { + type: 'done' + tokenCount: number + model: string +} + +export interface StreamErrorEvent { + type: 'error' + message: string + code: string +} + +export type StreamEvent = + | StreamStartEvent + | StreamDeltaEvent + | StreamDoneEvent + | StreamErrorEvent + +/** Response for GET /api/chat/models */ +export interface ModelInfo { + /** Model tier identifier */ + tier: ModelTier + /** Human-readable name */ + name: string + /** Whether this model is currently available */ + available: boolean + /** Concrete model ID if resolved */ + modelId?: string +} + +/** Response for POST /api/chat/context */ +export interface ResolvedContext { + /** The source that was resolved */ + source: ContextSource + /** Resolved content (truncated if too large) */ + content: string + /** Token estimate for this content */ + tokenEstimate: number + /** Whether content was truncated */ + truncated: boolean +} + +// ============================================ +// Hook Return Types +// ============================================ + +export interface UseChatReturn { + /** All conversations */ + conversations: Conversation[] + /** Currently active conversation (derived) */ + activeConversation: Conversation | null + /** Whether a response is currently streaming */ + isStreaming: boolean + /** Partial 
content during streaming */ + streamingContent: string + /** Send a message in the active conversation */ + sendMessage: (content: string) => Promise + /** Abort the current streaming response */ + stopStreaming: () => void + /** Create a new empty conversation */ + newConversation: () => void + /** Switch to a different conversation */ + switchConversation: (id: string) => void + /** Delete a conversation */ + deleteConversation: (id: string) => void +} + +export interface UseStreamingReturn { + /** Whether currently streaming */ + isStreaming: boolean + /** Accumulated content from the stream */ + content: string + /** Start a new stream */ + startStream: (request: ChatRequest) => void + /** Abort the current stream */ + stopStream: () => void + /** Error from the stream (if any) */ + error: string | null + /** Model info from the stream start event */ + model: string | null + /** Token count from the stream done event */ + tokenCount: number | null +} + +export interface UseResizeReturn { + /** Current width */ + width: number + /** Whether currently dragging */ + isDragging: boolean + /** Props to spread on the resize handle element */ + handleProps: { + onPointerDown: (e: PointerEvent) => void + style: Record + } +}