Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
460 changes: 460 additions & 0 deletions .agents/tools/ui/ai-chat-sidebar.md

Large diffs are not rendered by default.

172 changes: 172 additions & 0 deletions .opencode/ui/chat-sidebar/constants.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
/**
* AI Chat Sidebar — Configuration Constants
*
* Centralized constants for the chat sidebar feature.
* Adjust these to change default behavior without modifying component logic.
*
* @see .agents/tools/ui/ai-chat-sidebar.md for architecture docs
*/

import type { ModelTier, SidebarPosition } from './types'

// ============================================
// Sidebar Panel
// ============================================

/** Default width of the sidebar panel, in pixels. */
export const DEFAULT_SIDEBAR_WIDTH = 420

/** Lower clamp applied while the user drags the resize handle. */
export const MIN_SIDEBAR_WIDTH = 320

/** Upper clamp applied while the user drags the resize handle. */
export const MAX_SIDEBAR_WIDTH = 640

/** Side of the viewport the sidebar docks to by default. */
export const DEFAULT_SIDEBAR_POSITION: SidebarPosition = 'right'

/** Cookie that remembers whether the sidebar is open or closed. */
export const SIDEBAR_STATE_COOKIE = 'ai_chat_sidebar_state'

/** Cookie that remembers the user-chosen sidebar width. */
export const SIDEBAR_WIDTH_COOKIE = 'ai_chat_sidebar_width'

/** Lifetime of both sidebar cookies: 7 days, expressed in seconds. */
export const SIDEBAR_COOKIE_MAX_AGE = 7 * 24 * 60 * 60

// ============================================
// Chat
// ============================================

/** localStorage key under which all conversations are persisted. */
export const CONVERSATIONS_STORAGE_KEY = 'ai_chat_conversations'

/** Hard cap on stored conversations; older ones are dropped on save. */
export const MAX_STORED_CONVERSATIONS = 50

/** Per-conversation message cap before older messages are truncated. */
export const MAX_MESSAGES_PER_CONVERSATION = 200

/** Placeholder title given to a conversation before it earns a real one. */
export const DEFAULT_CONVERSATION_TITLE = 'New conversation'

// ============================================
// Settings
// ============================================

/** Model tier assigned to a conversation when none is chosen. */
export const DEFAULT_MODEL: ModelTier = 'sonnet'

/** Upper bound on tokens the assistant may generate per response. */
export const DEFAULT_MAX_TOKENS = 4096

/** Sampling temperature used for AI responses unless overridden. */
export const DEFAULT_TEMPERATURE = 0.7

/** Cookie that persists user chat settings. */
export const SETTINGS_COOKIE = 'ai_chat_settings'

/** Lifetime of the settings cookie: 30 days, expressed in seconds. */
export const SETTINGS_COOKIE_MAX_AGE = 30 * 24 * 60 * 60

// ============================================
// Input
// ============================================

/** The input textarea auto-grows until it reaches this many lines. */
export const INPUT_MAX_LINES = 6

/** Longest message (in characters) a user may submit at once. */
export const INPUT_MAX_CHARS = 32_000

// ============================================
// Streaming
// ============================================

/** Wait this long (ms) before attempting to reconnect a dropped SSE stream. */
export const SSE_RECONNECT_DELAY = 3_000

/** Abandon an SSE connection that produces nothing for this long (ms). */
export const SSE_TIMEOUT = 2 * 60 * 1000

// ============================================
// API
// ============================================

/** Prefix shared by every chat API route. */
export const CHAT_API_BASE = '/api/chat'

/** Fully-qualified chat API endpoints, derived from CHAT_API_BASE. */
export const CHAT_API = {
  /** POST a user message. */
  send: `${CHAT_API_BASE}/send`,
  /** SSE endpoint for streamed assistant output. */
  stream: `${CHAT_API_BASE}/stream`,
  /** CRUD for stored conversations. */
  conversations: `${CHAT_API_BASE}/conversations`,
  /** Available model tiers. */
  models: `${CHAT_API_BASE}/models`,
  /** Context sources attached to a conversation. */
  context: `${CHAT_API_BASE}/context`,
} as const

// ============================================
// Keyboard Shortcuts
// ============================================

/** ⌘⇧L — toggles the sidebar open/closed. */
export const TOGGLE_SHORTCUT = {
  key: 'l',
  metaKey: true,
  shiftKey: true,
  label: '⌘⇧L',
} as const

/** Plain Enter (no Shift) submits the current message. */
export const SEND_SHORTCUT = {
  key: 'Enter',
  shiftKey: false,
  label: 'Enter',
} as const

/** Shift+Enter inserts a newline instead of sending. */
export const NEWLINE_SHORTCUT = {
  key: 'Enter',
  shiftKey: true,
  label: '⇧Enter',
} as const

// ============================================
// Accessibility
// ============================================

/**
 * Accessible labels for the sidebar's interactive elements.
 * Keep these short, action-oriented, and free of jargon.
 */
export const ARIA = {
  sidebar: 'AI chat sidebar',
  toggleButton: 'Open AI chat',
  closeButton: 'Close AI chat',
  sendButton: 'Send message',
  stopButton: 'Stop generating',
  messageList: 'Chat messages',
  input: 'Type a message',
  resizeHandle: 'Resize chat sidebar',
  newChat: 'Start new conversation',
} as const

// ============================================
// CSS Custom Properties
// ============================================

/**
 * CSS custom-property names the sidebar reads and writes.
 * Components reference these instead of hard-coding variable strings.
 */
export const CSS_VARS = {
  sidebarWidth: '--ai-sidebar-width',
  sidebarTransition: '--ai-sidebar-transition',
} as const

// ============================================
// Model Display Names
// ============================================

/**
 * Human-readable labels for each model tier, shown in the model picker.
 *
 * `as const satisfies Record<ModelTier, string>` validates that every
 * tier has a label (a missing or misspelled key is a compile error)
 * while preserving the literal value types — the plain
 * `Record<ModelTier, string>` annotation used to widen them to string.
 */
export const MODEL_DISPLAY_NAMES = {
  haiku: 'Haiku (fast)',
  flash: 'Flash (balanced)',
  sonnet: 'Sonnet (default)',
  pro: 'Pro (advanced)',
  opus: 'Opus (best)',
} as const satisfies Record<ModelTier, string>
193 changes: 193 additions & 0 deletions .opencode/ui/chat-sidebar/context/chat-context.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,193 @@
/**
* Chat Context — Conversation state and streaming
*
* Manages conversations, messages, and streaming state.
* Persists conversations to localStorage.
*
* Implementation task: t005.3
* @see .agents/tools/ui/ai-chat-sidebar.md
*/

'use client'

import {
createContext,
useCallback,
useContext,
useMemo,
useState,
} from 'react'
import type { ReactNode } from 'react'
import type {
ChatState,
Conversation,
UseChatReturn,
} from '../types'
import {
CONVERSATIONS_STORAGE_KEY,
DEFAULT_CONVERSATION_TITLE,
DEFAULT_MODEL,
MAX_STORED_CONVERSATIONS,
} from '../constants'

// ============================================
// Context
// ============================================

// Null default signals "no ChatProvider mounted above"; useChat() detects
// this and substitutes inert no-op defaults instead of throwing.
const ChatContext = createContext<UseChatReturn | null>(null)

// ============================================
// Hooks
// ============================================

/**
 * Access chat state and operations from the nearest ChatProvider.
 *
 * Never throws when used outside a provider: instead it returns inert
 * defaults (empty conversation list, no-op actions) so consumers can
 * render safely in isolation.
 */
export function useChat(): UseChatReturn {
  const context = useContext(ChatContext)
  if (context) {
    return context
  }
  // No provider above this component — hand back a harmless stub.
  return {
    conversations: [],
    activeConversation: null,
    isStreaming: false,
    streamingContent: '',
    sendMessage: async () => {},
    stopStreaming: () => {},
    newConversation: () => {},
    switchConversation: () => {},
    deleteConversation: () => {},
  }
}

// ============================================
// Storage Helpers
// ============================================

/**
 * Read persisted conversations from localStorage.
 *
 * Returns [] when storage is unavailable (e.g. server render, privacy
 * mode), the key is absent, the payload is corrupt JSON, or the payload
 * is not an array. Never throws.
 */
function loadConversations(): Conversation[] {
  try {
    const stored = localStorage.getItem(CONVERSATIONS_STORAGE_KEY)
    if (!stored) return []
    const parsed: unknown = JSON.parse(stored)
    // Guard against corrupted or foreign data under our key — the old
    // unchecked cast would let a non-array through and crash every
    // consumer that maps over conversations.
    return Array.isArray(parsed) ? (parsed as Conversation[]) : []
  } catch {
    // localStorage access or JSON.parse threw — treat as empty.
    return []
  }
}

/**
 * Persist conversations to localStorage, capped at
 * MAX_STORED_CONVERSATIONS entries (newest-first order is assumed to
 * be maintained by callers).
 *
 * Persistence is best-effort: quota or availability errors are
 * swallowed deliberately.
 */
function saveConversations(conversations: Conversation[]): void {
  try {
    const recent = conversations.slice(0, MAX_STORED_CONVERSATIONS)
    localStorage.setItem(CONVERSATIONS_STORAGE_KEY, JSON.stringify(recent))
  } catch {
    // Storage may be full or unavailable — ignore and carry on.
  }
}

/** Produce a collision-resistant id (UUID v4) for a new conversation. */
function generateId(): string {
  // Web Crypto API — available in modern browsers and Node 19+.
  return globalThis.crypto.randomUUID()
}

// ============================================
// Provider
// ============================================

/** Props for ChatProvider — only the subtree to provide context to. */
interface ChatProviderProps {
  readonly children: ReactNode
}

/**
 * Provides conversation state, localStorage persistence, and
 * (currently stubbed) streaming controls to the subtree via ChatContext.
 */
export function ChatProvider({ children }: ChatProviderProps) {
  // Lazy initializer: loadConversations runs only on the first render.
  // NOTE(review): a server render yields [] (no localStorage) while the
  // client render may yield saved data — confirm this doesn't cause a
  // hydration mismatch in the consuming app.
  const [conversations, setConversations] = useState<Conversation[]>(loadConversations)
  // Most recently stored conversation starts active; null when none exist.
  const [activeConversationId, setActiveConversationId] = useState<string | null>(
    () => conversations[0]?.id ?? null,
  )
  const [isStreaming, setIsStreaming] = useState(false)
  const [streamingContent, setStreamingContent] = useState('')

  // Resolve the active conversation object from its id; null when the id
  // is unset or no longer present in the list.
  const activeConversation = useMemo(
    () => conversations.find((c) => c.id === activeConversationId) ?? null,
    [conversations, activeConversationId],
  )

  // Create an empty conversation, prepend it, persist, and make it active.
  const newConversation = useCallback(() => {
    const conversation: Conversation = {
      id: generateId(),
      title: DEFAULT_CONVERSATION_TITLE,
      messages: [],
      createdAt: Date.now(),
      updatedAt: Date.now(),
      model: DEFAULT_MODEL,
      contextSources: [],
    }
    setConversations((prev) => {
      const updated = [conversation, ...prev]
      saveConversations(updated)
      return updated
    })
    setActiveConversationId(conversation.id)
  }, [])

  // Make an existing conversation active by id (no validation of the id).
  const switchConversation = useCallback((id: string) => {
    setActiveConversationId(id)
  }, [])

  // Remove a conversation and persist; if it was active, clear the
  // selection (the UI falls back to the "no conversation" state rather
  // than auto-selecting another one).
  const deleteConversation = useCallback((id: string) => {
    setConversations((prev) => {
      const updated = prev.filter((c) => c.id !== id)
      saveConversations(updated)
      return updated
    })
    setActiveConversationId((prevId) => (prevId === id ? null : prevId))
  }, [])

  // Stub — full implementation arrives in t005.3 with the streaming hook.
  // Planned behavior:
  //   1. Add the user message to the active conversation
  //   2. Create an assistant message with status: 'streaming'
  //   3. Open the SSE connection
  //   4. Accumulate streamingContent as chunks arrive
  //   5. Finalize the assistant message on stream end
  const sendMessage = useCallback(async (content: string) => {
    void content // referenced to satisfy no-unused-vars until implemented
  }, [])

  // Stub — full implementation in t005.3; currently just resets the
  // streaming flags without aborting any connection.
  const stopStreaming = useCallback(() => {
    setIsStreaming(false)
    setStreamingContent('')
  }, [])

  // Memoize the context value so consumers only re-render when one of
  // the listed pieces of state (or a callback identity) changes.
  const contextValue = useMemo<UseChatReturn>(
    () => ({
      conversations,
      activeConversation,
      isStreaming,
      streamingContent,
      sendMessage,
      stopStreaming,
      newConversation,
      switchConversation,
      deleteConversation,
    }),
    [
      conversations,
      activeConversation,
      isStreaming,
      streamingContent,
      sendMessage,
      stopStreaming,
      newConversation,
      switchConversation,
      deleteConversation,
    ],
  )

  return (
    <ChatContext.Provider value={contextValue}>
      {children}
    </ChatContext.Provider>
  )
}
Loading