Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import { Dialog, DialogContent, DialogDescription, DialogHeader, DialogTitle } f
import { cn } from "../../ui/primitives/styles";
import { Tabs, TabsContent } from "../../ui/primitives/tabs";
import { useCrawlUrl, useUploadDocument } from "../hooks";
import type { CrawlRequest, UploadMetadata } from "../types";
import type { CrawlRequest, KnowledgeItemsFilter, UploadMetadata } from "../types";
import { KnowledgeTypeSelector } from "./KnowledgeTypeSelector";
import { LevelSelector } from "./LevelSelector";
import { TagInput } from "./TagInput";
Expand All @@ -21,18 +21,20 @@ interface AddKnowledgeDialogProps {
onOpenChange: (open: boolean) => void;
onSuccess: () => void;
onCrawlStarted?: (progressId: string) => void;
currentFilter?: KnowledgeItemsFilter;
}

export const AddKnowledgeDialog: React.FC<AddKnowledgeDialogProps> = ({
open,
onOpenChange,
onSuccess,
onCrawlStarted,
currentFilter,
}) => {
const [activeTab, setActiveTab] = useState<"crawl" | "upload">("crawl");
const { showToast } = useToast();
const crawlMutation = useCrawlUrl();
const uploadMutation = useUploadDocument();
const crawlMutation = useCrawlUrl(currentFilter);
const uploadMutation = useUploadDocument(currentFilter);

// Generate unique IDs for form elements
const urlId = useId();
Expand Down Expand Up @@ -83,7 +85,6 @@ export const AddKnowledgeDialog: React.FC<AddKnowledgeDialogProps> = ({
showToast("Crawl started successfully", "success");
resetForm();
onSuccess();
onOpenChange(false);
} catch (error) {
// Display the actual error message from backend
const message = error instanceof Error ? error.message : "Failed to start crawl";
Expand Down Expand Up @@ -198,7 +199,7 @@ export const AddKnowledgeDialog: React.FC<AddKnowledgeDialogProps> = ({
</Label>
<div className="relative">
<div className="absolute inset-y-0 left-0 pl-3 flex items-center pointer-events-none">
<Globe className="h-5 w-5" style={{ color: "#0891b2" }} />
<Globe className="h-5 w-5 text-cyan-600" />
</div>
<Input
id={urlId}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,14 @@ vi.mock("@/features/shared/hooks", () => ({
}),
}));

// Shared fixture: the "current filter" passed to hooks in filter-aware tests.
const testCurrentFilter = {
  knowledge_type: "technical" as const,
  search: "",
  page: 1,
  per_page: 100,
};

// Test wrapper with QueryClient
const createWrapper = () => {
const queryClient = new QueryClient({
Expand Down Expand Up @@ -163,6 +171,10 @@ describe("useKnowledgeQueries", () => {
});

describe("useCrawlUrl", () => {
beforeEach(() => {
vi.clearAllMocks();
});

it("should start crawl and return progress ID", async () => {
const crawlRequest = {
url: "https://example.com",
Expand Down Expand Up @@ -203,9 +215,142 @@ describe("useKnowledgeQueries", () => {
}),
).rejects.toThrow("Invalid URL");
});

it("should perform optimistic updates using provided current filter", async () => {
const crawlRequest = {
url: "https://example.com",
knowledge_type: "technical" as const,
tags: ["docs"],
max_depth: 2,
};

const mockResponse = {
success: true,
progressId: "progress-123",
message: "Crawling started",
estimatedDuration: "3-5 minutes",
};

const { knowledgeService } = await import("../../services");
vi.mocked(knowledgeService.crawlUrl).mockResolvedValue(mockResponse);

// Set up initial cache data
const initialData: KnowledgeItemsResponse = {
items: [],
total: 0,
page: 1,
per_page: 100,
};

const queryClient = new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
});

// Set up cache with test filter
queryClient.setQueryData(knowledgeKeys.summaries(testCurrentFilter), initialData);

const wrapper = ({ children }: { children: React.ReactNode }) =>
React.createElement(QueryClientProvider, { client: queryClient }, children);

const { result } = renderHook(() => useCrawlUrl(testCurrentFilter), { wrapper });

// Execute mutation
await result.current.mutateAsync(crawlRequest);

// Verify optimistic update was applied to current filter cache
const updatedData = queryClient.getQueryData<KnowledgeItemsResponse>(
knowledgeKeys.summaries(testCurrentFilter)
);

expect(updatedData).toBeDefined();
expect(updatedData?.items).toHaveLength(1);
expect(updatedData?.items[0]).toMatchObject({
url: crawlRequest.url,
knowledge_type: crawlRequest.knowledge_type,
status: "processing",
});
});

it("should update cache for matching filters during optimistic updates", async () => {
const crawlRequest = {
url: "https://example.com",
knowledge_type: "technical" as const, // Matches testCurrentFilter.knowledge_type
};

const mockResponse = {
success: true,
progressId: "progress-123",
message: "Crawling started",
};

const { knowledgeService } = await import("../../services");
vi.mocked(knowledgeService.crawlUrl).mockResolvedValue(mockResponse);

const initialData: KnowledgeItemsResponse = {
items: [],
total: 0,
page: 1,
per_page: 100,
};

const queryClient = new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
});

// Pre-populate cache with current filter
queryClient.setQueryData(knowledgeKeys.summaries(testCurrentFilter), initialData);

const wrapper = ({ children }: { children: React.ReactNode }) =>
React.createElement(QueryClientProvider, { client: queryClient }, children);

const { result } = renderHook(() => useCrawlUrl(testCurrentFilter), { wrapper });

await result.current.mutateAsync(crawlRequest);

// Verify optimistic update was applied
const updatedData = queryClient.getQueryData<KnowledgeItemsResponse>(
knowledgeKeys.summaries(testCurrentFilter)
);

expect(updatedData?.items).toHaveLength(1);
expect(updatedData?.total).toBe(1);
});

it("should work without currentFilter parameter", async () => {
  const { knowledgeService } = await import("../../services");
  const crawlResponse = {
    success: true,
    progressId: "progress-123",
    message: "Crawling started",
  };
  vi.mocked(knowledgeService.crawlUrl).mockResolvedValue(crawlResponse);

  // Invoke the hook with no filter argument — the optional-parameter path.
  const { result } = renderHook(() => useCrawlUrl(), { wrapper: createWrapper() });

  const reply = await result.current.mutateAsync({
    url: "https://example.com",
    knowledge_type: "technical" as const,
  });
  expect(reply).toEqual(crawlResponse);
});
});

describe("useUploadDocument", () => {
beforeEach(() => {
vi.clearAllMocks();
});

it("should upload document with metadata", async () => {
const file = new File(["test content"], "test.pdf", { type: "application/pdf" });
const metadata = {
Expand Down Expand Up @@ -242,5 +387,169 @@ describe("useKnowledgeQueries", () => {

await expect(result.current.mutateAsync({ file, metadata: {} })).rejects.toThrow("File too large");
});

it("should perform filter-aware optimistic updates for document uploads", async () => {
const file = new File(["test content"], "test.pdf", { type: "application/pdf" });
const metadata = {
knowledge_type: "technical" as const, // Matches testCurrentFilter.knowledge_type
};

const mockResponse = {
success: true,
progressId: "upload-456",
message: "Upload started",
filename: "test.pdf",
};

const { knowledgeService } = await import("../../services");
vi.mocked(knowledgeService.uploadDocument).mockResolvedValue(mockResponse);

const initialData: KnowledgeItemsResponse = {
items: [],
total: 0,
page: 1,
per_page: 100,
};

const queryClient = new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
});

queryClient.setQueryData(knowledgeKeys.summaries(testCurrentFilter), initialData);

const wrapper = ({ children }: { children: React.ReactNode }) =>
React.createElement(QueryClientProvider, { client: queryClient }, children);

const { result } = renderHook(() => useUploadDocument(testCurrentFilter), { wrapper });

await result.current.mutateAsync({ file, metadata });

// Verify optimistic update was applied to the cache
const updatedData = queryClient.getQueryData<KnowledgeItemsResponse>(
knowledgeKeys.summaries(testCurrentFilter)
);

expect(updatedData?.items).toHaveLength(1);
expect(updatedData?.items[0]).toMatchObject({
title: "test.pdf",
knowledge_type: metadata.knowledge_type,
status: "processing",
});
});

it("should use provided current filter for optimistic updates", async () => {
const file = new File(["content"], "doc.pdf", { type: "application/pdf" });
const metadata = {
knowledge_type: "technical" as const, // Matches test filter
};

const mockResponse = {
success: true,
progressId: "upload-789",
message: "Upload started",
filename: "doc.pdf",
};

const { knowledgeService } = await import("../../services");
vi.mocked(knowledgeService.uploadDocument).mockResolvedValue(mockResponse);

const initialData: KnowledgeItemsResponse = {
items: [],
total: 0,
page: 1,
per_page: 100,
};

const queryClient = new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
});

queryClient.setQueryData(knowledgeKeys.summaries(testCurrentFilter), initialData);

const wrapper = ({ children }: { children: React.ReactNode }) =>
React.createElement(QueryClientProvider, { client: queryClient }, children);

const { result } = renderHook(() => useUploadDocument(testCurrentFilter), { wrapper });

await result.current.mutateAsync({ file, metadata });

// Verify the cache was updated
const updatedData = queryClient.getQueryData<KnowledgeItemsResponse>(
knowledgeKeys.summaries(testCurrentFilter)
);

expect(updatedData?.items).toHaveLength(1);
});
});

describe("Filter Parameter Integration", () => {
beforeEach(() => {
vi.clearAllMocks();
});

it("should prioritize provided current filter updates over other cache keys", async () => {
// This test verifies the core enhancement: prioritizing provided current filter updates
const crawlRequest = {
url: "https://example.com",
knowledge_type: "technical" as const, // Matches testCurrentFilter.knowledge_type
};

const mockResponse = {
success: true,
progressId: "priority-test",
message: "Crawling started",
};

// Set up multiple cached filters
const otherFilter = { knowledge_type: 'business' as const, search: '', page: 1, per_page: 100 };

const { knowledgeService } = await import("../../services");
vi.mocked(knowledgeService.crawlUrl).mockResolvedValue(mockResponse);

const initialData: KnowledgeItemsResponse = {
items: [],
total: 0,
page: 1,
per_page: 100,
};

const queryClient = new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
});

// Set up both caches
queryClient.setQueryData(knowledgeKeys.summaries(testCurrentFilter), initialData);
queryClient.setQueryData(knowledgeKeys.summaries(otherFilter), initialData);

const wrapper = ({ children }: { children: React.ReactNode }) =>
React.createElement(QueryClientProvider, { client: queryClient }, children);

const { result } = renderHook(() => useCrawlUrl(testCurrentFilter), { wrapper });

await result.current.mutateAsync(crawlRequest);

// Verify current filter cache was updated first (gets priority)
const currentFilterData = queryClient.getQueryData<KnowledgeItemsResponse>(
knowledgeKeys.summaries(testCurrentFilter)
);
const otherFilterData = queryClient.getQueryData<KnowledgeItemsResponse>(
knowledgeKeys.summaries(otherFilter)
);

// Current filter should be updated since knowledge_type matches
expect(currentFilterData?.items).toHaveLength(1);

// Other filter should remain unchanged (no knowledge_type match)
expect(otherFilterData?.items).toHaveLength(0);
});
});
});
Loading