diff --git a/__tests__/components/header/HeaderSearchModalItem.test.tsx b/__tests__/components/header/HeaderSearchModalItem.test.tsx
index 6dbaf99555..78fe91550e 100644
--- a/__tests__/components/header/HeaderSearchModalItem.test.tsx
+++ b/__tests__/components/header/HeaderSearchModalItem.test.tsx
@@ -42,6 +42,7 @@ const getProfileTargetRouteMock = jest.fn(() => "/profile-route");
jest.mock("@/helpers/Helpers", () => ({
cicToType: (n: number) => `type${n}`,
formatNumberWithCommas: (n: number) => `formatted-${n}`,
+ formatStatFloor: (n: number) => n,
getProfileTargetRoute: () => getProfileTargetRouteMock(),
}));
diff --git a/__tests__/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.test.tsx b/__tests__/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.test.tsx
index f923950eb2..ad6cb8ba7c 100644
--- a/__tests__/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.test.tsx
+++ b/__tests__/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.test.tsx
@@ -1,11 +1,12 @@
-import React from 'react';
-import { render, screen, waitFor } from '@testing-library/react';
-import NextGenMintBurnWidget from '@/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget';
-import { Status } from '@/components/nextGen/nextgen_entities';
-import { NEXTGEN_CHAIN_ID, NEXTGEN_CORE } from '@/components/nextGen/nextgen_contracts';
-
-jest.mock('react-bootstrap', () => {
- const React = require('react');
+import NextGenMintBurnWidget from "@/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget";
+import {
+ NEXTGEN_CHAIN_ID,
+ NEXTGEN_CORE,
+} from "@/components/nextGen/nextgen_contracts";
+import { Status } from "@/components/nextGen/nextgen_entities";
+import { render, screen, waitFor } from "@testing-library/react";
+
+jest.mock("react-bootstrap", () => {
const Form = (p: any) =>
;
Form.Group = (p: any) => ;
Form.Label = (p: any) => ;
@@ -20,32 +21,44 @@ jest.mock('react-bootstrap', () => {
};
});
-jest.mock('@/components/nextGen/collections/collectionParts/mint/NextGenMintShared', () => ({
- NextGenMintingFor: () => ,
-}));
+jest.mock(
+ "@/components/nextGen/collections/collectionParts/mint/NextGenMintShared",
+ () => ({
+ NextGenMintingFor: () => ,
+ })
+);
-jest.mock('@/components/nextGen/NextGenContractWriteStatus', () => () => );
+jest.mock("@/components/nextGen/NextGenContractWriteStatus", () => () => (
+
+));
const mockFetchUrl = jest.fn();
-jest.mock('@/services/6529api', () => ({ fetchUrl: (...args: any[]) => mockFetchUrl(...args) }));
+jest.mock("@/services/6529api", () => ({
+ fetchUrl: (...args: any[]) => mockFetchUrl(...args),
+}));
-jest.mock('@/components/auth/SeizeConnectContext', () => ({
+jest.mock("@/components/auth/SeizeConnectContext", () => ({
useSeizeConnectContext: jest.fn(),
}));
-jest.mock('wagmi', () => ({
+jest.mock("wagmi", () => ({
useChainId: jest.fn(),
useWriteContract: jest.fn(),
}));
-jest.mock('@/components/nextGen/nextgen_helpers', () => ({
+jest.mock("@/components/nextGen/nextgen_helpers", () => ({
useMintSharedState: jest.fn(),
getStatusFromDates: jest.fn(),
}));
-const { useChainId, useWriteContract } = require('wagmi');
-const { useSeizeConnectContext } = require('@/components/auth/SeizeConnectContext');
-const { useMintSharedState, getStatusFromDates } = require('@/components/nextGen/nextgen_helpers');
+const { useChainId, useWriteContract } = require("wagmi");
+const {
+ useSeizeConnectContext,
+} = require("@/components/auth/SeizeConnectContext");
+const {
+ useMintSharedState,
+ getStatusFromDates,
+} = require("@/components/nextGen/nextgen_helpers");
const originalFetch = global.fetch;
const mockFetch = jest.fn() as jest.MockedFunction;
@@ -54,9 +67,9 @@ function createMintState(overrides: Partial = {}) {
return {
burnProofResponse: undefined,
setBurnProofResponse: jest.fn(),
- mintForAddress: '0xabc',
+ mintForAddress: "0xabc",
setMintForAddress: jest.fn(),
- tokenId: '',
+ tokenId: "",
setTokenId: jest.fn(),
salt: 0,
isMinting: false,
@@ -79,7 +92,7 @@ const baseProps = {
} as any,
collection_merkle: {
collection_id: 1,
- merkle_root: 'root',
+ merkle_root: "root",
burn_collection: NEXTGEN_CORE[NEXTGEN_CHAIN_ID],
burn_collection_id: 1,
min_token_index: 0,
@@ -95,12 +108,28 @@ const baseProps = {
refreshMintCounts: jest.fn(),
};
-function renderWidget(props: Partial = {}, state: any = {}, context: any = {}, chainId?: number) {
+function renderWidget(
+ props: Partial = {},
+ state: any = {},
+ context: any = {},
+ chainId?: number
+) {
const mintState = createMintState(state);
(useMintSharedState as jest.Mock).mockReturnValue(mintState);
- (useSeizeConnectContext as jest.Mock).mockReturnValue({ isConnected: true, address: '0xabc', seizeConnect: jest.fn(), ...context });
+ (useSeizeConnectContext as jest.Mock).mockReturnValue({
+ isConnected: true,
+ address: "0xabc",
+ seizeConnect: jest.fn(),
+ ...context,
+ });
(useChainId as jest.Mock).mockReturnValue(chainId ?? NEXTGEN_CHAIN_ID);
- (useWriteContract as jest.Mock).mockReturnValue({ writeContract: jest.fn(), reset: jest.fn(), isPending: false, isSuccess: false, isError: false });
+ (useWriteContract as jest.Mock).mockReturnValue({
+ writeContract: jest.fn(),
+ reset: jest.fn(),
+ isPending: false,
+ isSuccess: false,
+ isError: false,
+ });
(getStatusFromDates as jest.Mock).mockReturnValue(Status.LIVE);
return render();
}
@@ -115,17 +144,18 @@ afterAll(() => {
global.fetch = originalFetch;
});
-
-describe('NextGenMintBurnWidget', () => {
- it('filters tokens by range and prefix', async () => {
+describe("NextGenMintBurnWidget", () => {
+ it("filters tokens by range and prefix", async () => {
mockFetch.mockResolvedValue({
ok: true,
- json: async () => [
- { tokenId: 90 },
- { tokenId: 110, name: 'A' },
- { tokenId: 115 },
- { tokenId: 201 },
- ],
+ json: async () => ({
+ ownedNfts: [
+ { tokenId: "90" },
+ { tokenId: "110", name: "A" },
+ { tokenId: "115" },
+ { tokenId: "201" },
+ ],
+ }),
} as unknown as Response);
renderWidget(
@@ -137,38 +167,40 @@ describe('NextGenMintBurnWidget', () => {
burn_collection_id: 1,
},
},
- { mintForAddress: '0xabc' }
+ { mintForAddress: "0xabc" }
);
await waitFor(() => {
expect(mockFetch).toHaveBeenCalled();
- expect(screen.getAllByRole('option').length).toBeGreaterThan(1);
+ expect(screen.getAllByRole("option").length).toBeGreaterThan(1);
});
- const options = screen.getAllByRole('option');
- expect(options[1]).toHaveValue('110');
- expect(options[2]).toHaveValue('115');
+ const options = screen.getAllByRole("option");
+ expect(options[1]).toHaveValue("110");
+ expect(options[2]).toHaveValue("115");
});
- it('shows connect wallet when not connected', () => {
+ it("shows connect wallet when not connected", () => {
renderWidget({}, {}, { isConnected: false });
- expect(screen.getByRole('button')).toHaveTextContent('Connect Wallet');
+ expect(screen.getByRole("button")).toHaveTextContent("Connect Wallet");
});
- it('prompts network switch when chain differs', () => {
+ it("prompts network switch when chain differs", () => {
renderWidget({}, {}, {}, NEXTGEN_CHAIN_ID + 1);
- expect(screen.getByRole('button')).toHaveTextContent('Switch to');
+ expect(screen.getByRole("button")).toHaveTextContent("Switch to");
});
- it('shows processing state when minting', () => {
+ it("shows processing state when minting", () => {
const { container } = renderWidget({}, { isMinting: true });
- expect(screen.getByRole('button')).toHaveTextContent('Processing...');
- expect(container.querySelector('output')).toBeInTheDocument();
+ expect(screen.getByRole("button")).toHaveTextContent("Processing...");
+ expect(container.querySelector("output")).toBeInTheDocument();
});
- it('disables button when burn not active', () => {
- renderWidget({ collection_merkle: { ...baseProps.collection_merkle, status: false } });
- const btn = screen.getByRole('button');
+ it("disables button when burn not active", () => {
+ renderWidget({
+ collection_merkle: { ...baseProps.collection_merkle, status: false },
+ });
+ const btn = screen.getByRole("button");
expect(btn).toBeDisabled();
- expect(btn).toHaveTextContent('Burn Not Active');
+ expect(btn).toHaveTextContent("Burn Not Active");
});
});
diff --git a/__tests__/components/user/layout/UserPageTabs.test.tsx b/__tests__/components/user/layout/UserPageTabs.test.tsx
index 5d27c75c3a..66aa4cf7c1 100644
--- a/__tests__/components/user/layout/UserPageTabs.test.tsx
+++ b/__tests__/components/user/layout/UserPageTabs.test.tsx
@@ -2,12 +2,18 @@ import { AuthContext } from "@/components/auth/Auth";
import UserPageTabs from "@/components/user/layout/UserPageTabs";
import { USER_PAGE_TAB_IDS } from "@/components/user/layout/userTabs.config";
import { render, screen } from "@testing-library/react";
-import { usePathname, useRouter, useSearchParams } from "next/navigation";
+import {
+ useParams,
+ usePathname,
+ useRouter,
+ useSearchParams,
+} from "next/navigation";
jest.mock("next/navigation", () => ({
useRouter: jest.fn(),
usePathname: jest.fn(),
useSearchParams: jest.fn(),
+ useParams: jest.fn(),
}));
const capacitorMock = jest.fn();
jest.mock("@/hooks/useCapacitor", () => ({
@@ -34,6 +40,7 @@ const renderTabs = (
(useRouter as jest.Mock).mockReturnValue({ push: jest.fn() });
(usePathname as jest.Mock).mockReturnValue("/[user]/rep");
(useSearchParams as jest.Mock).mockReturnValue(new URLSearchParams());
+ (useParams as jest.Mock).mockReturnValue({ user: "testuser" });
capacitorMock.mockReturnValue({ isIos });
(useCookieConsent as jest.Mock).mockReturnValue({
showCookieConsent: false,
diff --git a/__tests__/hooks/useAlchemyNftQueries.test.ts b/__tests__/hooks/useAlchemyNftQueries.test.ts
new file mode 100644
index 0000000000..c949c154ab
--- /dev/null
+++ b/__tests__/hooks/useAlchemyNftQueries.test.ts
@@ -0,0 +1,217 @@
+import { fetchOwnerNfts } from "@/hooks/useAlchemyNftQueries";
+
+const MOCK_API_ENDPOINT = "https://api.example.com";
+
+jest.mock("@/config/env", () => ({
+ publicEnv: {
+ API_ENDPOINT: "https://api.example.com",
+ BASE_ENDPOINT: "https://example.com",
+ ALLOWLIST_API_ENDPOINT: "https://allowlist.example.com",
+ },
+}));
+
+describe("useAlchemyNftQueries", () => {
+ const originalFetch = globalThis.fetch;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ afterEach(() => {
+ globalThis.fetch = originalFetch;
+ });
+
+ describe("fetchOwnerNfts", () => {
+ const mockAlchemyResponse = {
+ ownedNfts: [
+ {
+ tokenId: "1",
+ tokenType: "ERC721",
+ name: "Test NFT",
+ tokenUri: "https://example.com/1",
+ image: null,
+ },
+ ],
+ pageKey: undefined,
+ };
+
+ const expectedProcessedResult = [
+ {
+ tokenId: "1",
+ tokenType: "ERC721",
+ name: "Test NFT",
+ tokenUri: "https://example.com/1",
+ image: null,
+ },
+ ];
+
+ it("should return processed data from primary endpoint when successful", async () => {
+ globalThis.fetch = jest.fn().mockResolvedValueOnce({
+ ok: true,
+ json: () => Promise.resolve(mockAlchemyResponse),
+ });
+
+ const result = await fetchOwnerNfts(1, "0x123", "0xowner");
+
+ expect(result).toEqual(expectedProcessedResult);
+ expect(globalThis.fetch).toHaveBeenCalledTimes(1);
+ expect(globalThis.fetch).toHaveBeenCalledWith(
+ "/api/alchemy/owner-nfts?chainId=1&contract=0x123&owner=0xowner",
+ { signal: undefined }
+ );
+ });
+
+ it("should fallback to backend proxy when primary endpoint fails with non-ok response", async () => {
+ globalThis.fetch = jest
+ .fn()
+ .mockResolvedValueOnce({
+ ok: false,
+ status: 400,
+ })
+ .mockResolvedValueOnce({
+ ok: true,
+ json: () => Promise.resolve(mockAlchemyResponse),
+ });
+
+ const result = await fetchOwnerNfts(1, "0x123", "0xowner");
+
+ expect(result).toEqual(expectedProcessedResult);
+ expect(globalThis.fetch).toHaveBeenCalledTimes(2);
+ expect(globalThis.fetch).toHaveBeenNthCalledWith(
+ 1,
+ "/api/alchemy/owner-nfts?chainId=1&contract=0x123&owner=0xowner",
+ { signal: undefined }
+ );
+ expect(globalThis.fetch).toHaveBeenNthCalledWith(
+ 2,
+ `${MOCK_API_ENDPOINT}/alchemy-proxy/owner-nfts?chainId=1&contract=0x123&owner=0xowner`,
+ { signal: undefined }
+ );
+ });
+
+ it("should fallback to backend proxy when primary endpoint throws network error", async () => {
+ globalThis.fetch = jest
+ .fn()
+ .mockRejectedValueOnce(new Error("Network error"))
+ .mockResolvedValueOnce({
+ ok: true,
+ json: () => Promise.resolve(mockAlchemyResponse),
+ });
+
+ const result = await fetchOwnerNfts(1, "0x123", "0xowner");
+
+ expect(result).toEqual(expectedProcessedResult);
+ expect(globalThis.fetch).toHaveBeenCalledTimes(2);
+ });
+
+ it("should throw error when both primary and fallback fail", async () => {
+ globalThis.fetch = jest
+ .fn()
+ .mockResolvedValueOnce({
+ ok: false,
+ status: 500,
+ })
+ .mockResolvedValueOnce({
+ ok: false,
+ status: 500,
+ });
+
+ await expect(fetchOwnerNfts(1, "0x123", "0xowner")).rejects.toThrow(
+ "Request failed with status 500"
+ );
+ expect(globalThis.fetch).toHaveBeenCalledTimes(2);
+ });
+
+ it("should pass abort signal to fetch calls", async () => {
+ const controller = new AbortController();
+ globalThis.fetch = jest.fn().mockResolvedValueOnce({
+ ok: true,
+ json: () => Promise.resolve(mockAlchemyResponse),
+ });
+
+ await fetchOwnerNfts(1, "0x123", "0xowner", controller.signal);
+
+ expect(globalThis.fetch).toHaveBeenCalledWith(expect.any(String), {
+ signal: controller.signal,
+ });
+ });
+
+ it("should handle different chain IDs correctly", async () => {
+ globalThis.fetch = jest.fn().mockResolvedValueOnce({
+ ok: true,
+ json: () => Promise.resolve(mockAlchemyResponse),
+ });
+
+ await fetchOwnerNfts(11155111, "0x123", "0xowner");
+
+ expect(globalThis.fetch).toHaveBeenCalledWith(
+ "/api/alchemy/owner-nfts?chainId=11155111&contract=0x123&owner=0xowner",
+ { signal: undefined }
+ );
+ });
+
+ it("should NOT fallback when request is aborted via AbortController", async () => {
+ const abortError = new DOMException(
+ "The operation was aborted.",
+ "AbortError"
+ );
+ globalThis.fetch = jest.fn().mockRejectedValueOnce(abortError);
+
+ await expect(fetchOwnerNfts(1, "0x123", "0xowner")).rejects.toThrow();
+ expect(globalThis.fetch).toHaveBeenCalledTimes(1);
+ });
+
+ it("should NOT fallback when request throws Error with name AbortError", async () => {
+ const abortError = new Error("The operation was aborted.");
+ abortError.name = "AbortError";
+ globalThis.fetch = jest.fn().mockRejectedValueOnce(abortError);
+
+ await expect(fetchOwnerNfts(1, "0x123", "0xowner")).rejects.toThrow();
+ expect(globalThis.fetch).toHaveBeenCalledTimes(1);
+ });
+
+ it("should process raw Alchemy response correctly", async () => {
+ const rawAlchemyResponse = {
+ ownedNfts: [
+ {
+ tokenId: "123",
+ tokenType: "ERC1155",
+ name: null,
+ tokenUri: null,
+ image: { thumbnailUrl: "https://img.example.com/123.png" },
+ },
+ {
+ tokenId: "456",
+ tokenType: "ERC721",
+ name: "Cool NFT",
+ tokenUri: "https://metadata.example.com/456",
+ image: null,
+ },
+ ],
+ };
+
+ globalThis.fetch = jest.fn().mockResolvedValueOnce({
+ ok: true,
+ json: () => Promise.resolve(rawAlchemyResponse),
+ });
+
+ const result = await fetchOwnerNfts(1, "0x123", "0xowner");
+
+ expect(result).toHaveLength(2);
+ expect(result[0]).toEqual({
+ tokenId: "123",
+ tokenType: "ERC1155",
+ name: null,
+ tokenUri: null,
+ image: { thumbnailUrl: "https://img.example.com/123.png" },
+ });
+ expect(result[1]).toEqual({
+ tokenId: "456",
+ tokenType: "ERC721",
+ name: "Cool NFT",
+ tokenUri: "https://metadata.example.com/456",
+ image: null,
+ });
+ });
+ });
+});
diff --git a/__tests__/services/6529api.test.ts b/__tests__/services/6529api.test.ts
index 5245cb3b0c..0fdab711f6 100644
--- a/__tests__/services/6529api.test.ts
+++ b/__tests__/services/6529api.test.ts
@@ -1,73 +1,99 @@
-import * as api from '@/services/6529api';
-import Cookies from 'js-cookie';
-import { getStagingAuth } from '@/services/auth/auth.utils';
-import { API_AUTH_COOKIE } from '@/constants';
+import { API_AUTH_COOKIE } from "@/constants";
+import * as api from "@/services/6529api";
+import { getStagingAuth } from "@/services/auth/auth.utils";
+import Cookies from "js-cookie";
-jest.mock('js-cookie', () => ({ remove: jest.fn() }));
-jest.mock('@/services/auth/auth.utils', () => ({ getStagingAuth: jest.fn() }));
+jest.mock("js-cookie", () => ({ remove: jest.fn() }));
+jest.mock("@/services/auth/auth.utils", () => ({ getStagingAuth: jest.fn() }));
const { fetchUrl, fetchAllPages, postData, postFormData } = api;
-describe('6529api service', () => {
+function getHeaders(callIndex = 0): Headers {
+ return (globalThis.fetch as jest.Mock).mock.calls[callIndex][1].headers;
+}
+
+describe("6529api service", () => {
beforeEach(() => {
jest.resetAllMocks();
- (global as any).fetch = jest.fn();
+ (globalThis as any).fetch = jest.fn();
});
- it('fetchUrl removes cookie on 401 and returns json', async () => {
- (getStagingAuth as jest.Mock).mockReturnValue('token');
+ it("fetchUrl removes cookie on 401 and returns json", async () => {
+ (getStagingAuth as jest.Mock).mockReturnValue("token");
const json = jest.fn().mockResolvedValue({ ok: true });
- (global.fetch as jest.Mock).mockResolvedValue({ status: 401, json });
+ (globalThis.fetch as jest.Mock).mockResolvedValue({ status: 401, json });
- const result = await fetchUrl('/foo');
+ const result = await fetchUrl("/foo");
- expect(global.fetch).toHaveBeenCalledWith('/foo', { headers: { 'x-6529-auth': 'token' } });
+ expect(globalThis.fetch).toHaveBeenCalledWith("/foo", expect.any(Object));
+ expect(getHeaders().get("x-6529-auth")).toBe("token");
expect(Cookies.remove).toHaveBeenCalledWith(API_AUTH_COOKIE);
expect(result).toEqual({ ok: true });
});
- it('fetchAllPages concatenates pages', async () => {
+ it("fetchAllPages concatenates pages", async () => {
(getStagingAuth as jest.Mock).mockReturnValue(null);
- (global.fetch as jest.Mock)
- .mockResolvedValueOnce({ status: 200, json: async () => ({ data: ['a'], next: 'http://localhost/next' }) })
- .mockResolvedValueOnce({ status: 200, json: async () => ({ data: ['b'] }) });
+ (globalThis.fetch as jest.Mock)
+ .mockResolvedValueOnce({
+ ok: true,
+ status: 200,
+ json: async () => ({ data: ["a"], next: "http://localhost/next" }),
+ })
+ .mockResolvedValueOnce({
+ ok: true,
+ status: 200,
+ json: async () => ({ data: ["b"] }),
+ });
- const result = await fetchAllPages('http://localhost/start');
+ const result = await fetchAllPages("http://localhost/start");
- expect(global.fetch).toHaveBeenCalledTimes(2);
- expect(global.fetch).toHaveBeenNthCalledWith(1, 'http://localhost/start', { headers: {} });
- expect(global.fetch).toHaveBeenNthCalledWith(2, 'http://localhost/next', { headers: {} });
- expect(result).toEqual(['a', 'b']);
+ expect(globalThis.fetch).toHaveBeenCalledTimes(2);
+ expect(globalThis.fetch).toHaveBeenNthCalledWith(
+ 1,
+ "http://localhost/start",
+ expect.any(Object)
+ );
+ expect(globalThis.fetch).toHaveBeenNthCalledWith(
+ 2,
+ "http://localhost/next",
+ expect.any(Object)
+ );
+ expect(getHeaders(0).get("x-6529-auth")).toBeNull();
+ expect(getHeaders(1).get("x-6529-auth")).toBeNull();
+ expect(result).toEqual(["a", "b"]);
});
- it('postData sends JSON body and returns status/response', async () => {
+ it("postData sends JSON body and returns status/response", async () => {
(getStagingAuth as jest.Mock).mockReturnValue(null);
const json = jest.fn().mockResolvedValue({ done: true });
- (global.fetch as jest.Mock).mockResolvedValue({ status: 201, json });
+ (globalThis.fetch as jest.Mock).mockResolvedValue({
+ ok: true,
+ status: 201,
+ json,
+ });
- const result = await postData('/bar', { foo: 'bar' });
+ const result = await postData("/bar", { foo: "bar" });
- expect(global.fetch).toHaveBeenCalledWith('/bar', {
- method: 'POST',
- body: JSON.stringify({ foo: 'bar' }),
- headers: { 'Content-Type': 'application/json' },
- });
+ expect(globalThis.fetch).toHaveBeenCalledWith("/bar", expect.any(Object));
+ expect(getHeaders().get("Content-Type")).toBe("application/json");
+ expect(getHeaders().get("x-6529-auth")).toBeNull();
expect(result).toEqual({ status: 201, response: { done: true } });
});
- it('postFormData sends FormData body with auth header', async () => {
- (getStagingAuth as jest.Mock).mockReturnValue('tok');
+ it("postFormData sends FormData body with auth header", async () => {
+ (getStagingAuth as jest.Mock).mockReturnValue("tok");
const formData = new FormData();
const json = jest.fn().mockResolvedValue({ ok: true });
- (global.fetch as jest.Mock).mockResolvedValue({ status: 200, json });
+ (globalThis.fetch as jest.Mock).mockResolvedValue({
+ ok: true,
+ status: 200,
+ json,
+ });
- const result = await postFormData('/fd', formData);
+ const result = await postFormData("/fd", formData);
- expect(global.fetch).toHaveBeenCalledWith('/fd', {
- method: 'POST',
- body: formData,
- headers: { 'x-6529-auth': 'tok' },
- });
+ expect(globalThis.fetch).toHaveBeenCalledWith("/fd", expect.any(Object));
+ expect(getHeaders().get("x-6529-auth")).toBe("tok");
expect(result).toEqual({ status: 200, response: { ok: true } });
});
});
diff --git a/__tests__/services/alchemy-api.test.ts b/__tests__/services/alchemy-api.test.ts
index 69c516ca92..70380fbba0 100644
--- a/__tests__/services/alchemy-api.test.ts
+++ b/__tests__/services/alchemy-api.test.ts
@@ -1,3 +1,7 @@
+/**
+ * @jest-environment node
+ */
+
jest.mock("@/config/alchemyEnv", () => ({
getAlchemyApiKey: () => "test",
}));
diff --git a/app/api/alchemy/collections/route.ts b/app/api/alchemy/collections/route.ts
index 10f6ed4fff..5e0f01ab3e 100644
--- a/app/api/alchemy/collections/route.ts
+++ b/app/api/alchemy/collections/route.ts
@@ -1,10 +1,18 @@
import { NextRequest, NextResponse } from "next/server";
-import { searchNftCollections } from "@/services/alchemy-api";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
import type { SupportedChain } from "@/types/nft";
const NO_STORE_HEADERS = { "Cache-Control": "no-store" };
+const NETWORK_MAP: Record = {
+ ethereum: "eth-mainnet",
+};
+
+function resolveNetwork(chain: SupportedChain = "ethereum"): string {
+ return NETWORK_MAP[chain] ?? NETWORK_MAP.ethereum;
+}
+
export async function GET(request: NextRequest) {
const { searchParams } = new URL(request.url);
const query = searchParams.get("query") ?? "";
@@ -16,22 +24,38 @@ export async function GET(request: NextRequest) {
}
const chain = (searchParams.get("chain") ?? "ethereum") as SupportedChain;
- const hideSpam = searchParams.get("hideSpam") !== "0" &&
- searchParams.get("hideSpam") !== "false";
const pageKey = searchParams.get("pageKey") ?? undefined;
try {
- const result = await searchNftCollections({
- query,
- chain,
- hideSpam,
- pageKey,
+ const apiKey = getAlchemyApiKey();
+ const network = resolveNetwork(chain);
+ const url = new URL(
+ `https://${network}.g.alchemy.com/nft/v3/${apiKey}/searchContractMetadata`
+ );
+ url.searchParams.set("query", query.trim());
+ if (pageKey) {
+ url.searchParams.set("pageKey", pageKey);
+ }
+
+ const response = await fetch(url.toString(), {
+ headers: { Accept: "application/json" },
signal: request.signal,
});
- return NextResponse.json(result, { headers: NO_STORE_HEADERS });
+
+ if (!response.ok) {
+ return NextResponse.json(
+ { error: "Failed to search NFT collections" },
+ { status: response.status, headers: NO_STORE_HEADERS }
+ );
+ }
+
+ const payload = await response.json();
+ return NextResponse.json(payload, { headers: NO_STORE_HEADERS });
} catch (error) {
const message =
- error instanceof Error ? error.message : "Failed to search NFT collections";
+ error instanceof Error
+ ? error.message
+ : "Failed to search NFT collections";
return NextResponse.json(
{ error: message },
{ status: 400, headers: NO_STORE_HEADERS }
diff --git a/app/api/alchemy/contract/route.ts b/app/api/alchemy/contract/route.ts
index 88ea682e1f..6df1681ef2 100644
--- a/app/api/alchemy/contract/route.ts
+++ b/app/api/alchemy/contract/route.ts
@@ -1,29 +1,63 @@
import { NextRequest, NextResponse } from "next/server";
-import { getContractOverview } from "@/services/alchemy-api";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
+import { isValidEthAddress } from "@/helpers/Helpers";
+import { normaliseAddress } from "@/services/alchemy/utils";
import type { SupportedChain } from "@/types/nft";
const NO_STORE_HEADERS = { "Cache-Control": "no-store" };
+const NETWORK_MAP: Record = {
+ ethereum: "eth-mainnet",
+};
+
+function resolveNetwork(chain: SupportedChain = "ethereum"): string {
+ return NETWORK_MAP[chain] ?? NETWORK_MAP.ethereum;
+}
+
export async function GET(request: NextRequest) {
const { searchParams } = new URL(request.url);
- const address = searchParams.get("address") as `0x${string}` | null;
- if (!address) {
+ const address = searchParams.get("address");
+ if (!address || !isValidEthAddress(address)) {
return NextResponse.json(
{ error: "address is required" },
{ status: 400, headers: NO_STORE_HEADERS }
);
}
+ const checksum = normaliseAddress(address);
+ if (!checksum) {
+ return NextResponse.json(null, { headers: NO_STORE_HEADERS });
+ }
+
const chain = (searchParams.get("chain") ?? "ethereum") as SupportedChain;
try {
- const overview = await getContractOverview({
- address,
- chain,
+ const apiKey = getAlchemyApiKey();
+ const network = resolveNetwork(chain);
+ const url = `https://${network}.g.alchemy.com/nft/v3/${apiKey}/getContractMetadata?contractAddress=${checksum}`;
+
+ const response = await fetch(url, {
+ headers: { Accept: "application/json" },
signal: request.signal,
});
- return NextResponse.json(overview, { headers: NO_STORE_HEADERS });
+
+ if (response.status === 404) {
+ return NextResponse.json(null, { headers: NO_STORE_HEADERS });
+ }
+
+ if (!response.ok) {
+ return NextResponse.json(
+ { error: "Failed to fetch contract metadata" },
+ { status: response.status, headers: NO_STORE_HEADERS }
+ );
+ }
+
+ const payload = await response.json();
+ return NextResponse.json(
+ { ...payload, _checksum: checksum },
+ { headers: NO_STORE_HEADERS }
+ );
} catch (error) {
const message =
error instanceof Error ? error.message : "Failed to fetch contract metadata";
diff --git a/app/api/alchemy/owner-nfts/route.ts b/app/api/alchemy/owner-nfts/route.ts
index 1df37f3d98..973f467013 100644
--- a/app/api/alchemy/owner-nfts/route.ts
+++ b/app/api/alchemy/owner-nfts/route.ts
@@ -1,9 +1,15 @@
import { NextRequest, NextResponse } from "next/server";
-import { getNftsForContractAndOwner } from "@/services/alchemy-api";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
const NO_STORE_HEADERS = { "Cache-Control": "no-store" };
+function resolveNetworkByChainId(chainId: number): string {
+ if (chainId === 11155111) return "eth-sepolia";
+ if (chainId === 5) return "eth-goerli";
+ return "eth-mainnet";
+}
+
export async function GET(request: NextRequest) {
const { searchParams } = new URL(request.url);
const chainIdRaw = searchParams.get("chainId");
@@ -27,16 +33,30 @@ export async function GET(request: NextRequest) {
}
try {
- const nfts = await getNftsForContractAndOwner(
- chainId,
- contract,
- owner,
- [],
- pageKey,
- 0,
- request.signal
- );
- return NextResponse.json(nfts, { headers: NO_STORE_HEADERS });
+ const apiKey = getAlchemyApiKey();
+ const network = resolveNetworkByChainId(chainId);
+ const params = new URLSearchParams();
+ params.set("owner", owner);
+ params.append("contractAddresses[]", contract);
+ if (pageKey) {
+ params.set("pageKey", pageKey);
+ }
+ const url = `https://${network}.g.alchemy.com/nft/v3/${apiKey}/getNFTsForOwner?${params.toString()}`;
+
+ const response = await fetch(url, {
+ headers: { accept: "application/json" },
+ signal: request.signal,
+ });
+
+ if (!response.ok) {
+ return NextResponse.json(
+ { error: "Failed to fetch NFTs for owner" },
+ { status: response.status, headers: NO_STORE_HEADERS }
+ );
+ }
+
+ const payload = await response.json();
+ return NextResponse.json(payload, { headers: NO_STORE_HEADERS });
} catch (error) {
const message =
error instanceof Error ? error.message : "Failed to fetch NFTs for owner";
diff --git a/app/api/alchemy/token-metadata/route.ts b/app/api/alchemy/token-metadata/route.ts
index 19c473520c..96a5517c97 100644
--- a/app/api/alchemy/token-metadata/route.ts
+++ b/app/api/alchemy/token-metadata/route.ts
@@ -1,20 +1,102 @@
import { NextRequest, NextResponse } from "next/server";
-import { getTokensMetadata } from "@/services/alchemy-api";
-import type { SupportedChain, TokenMetadata } from "@/types/nft";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
+import { isValidEthAddress } from "@/helpers/Helpers";
+import { fetchPublicJson, UrlGuardError } from "@/lib/security/urlGuard";
+import { normaliseAddress, resolveNetwork } from "@/services/alchemy/utils";
+import type { SupportedChain } from "@/types/nft";
const NO_STORE_HEADERS = { "Cache-Control": "no-store" };
+const MAX_BATCH_SIZE = 100;
type TokenMetadataRequestBody = {
- readonly address?: `0x${string}`;
+ readonly address?: string;
readonly tokenIds?: string[];
readonly tokens?: { contract: string; tokenId: string }[];
readonly chain?: SupportedChain;
};
-type SerializableTokenMetadata = Omit & {
- readonly tokenId: string;
-};
+type TokenToFetch = { contractAddress: string; tokenId: string };
+
+type ParseResult =
+ | { ok: true; tokens: TokenToFetch[]; chain: SupportedChain }
+ | { ok: false; response: NextResponse };
+
+function parseTokensArray(
+ tokens: { contract: string; tokenId: string }[]
+): TokenToFetch[] {
+ const results: TokenToFetch[] = [];
+ for (const t of tokens) {
+ const contractAddress = normaliseAddress(t.contract);
+ if (contractAddress) {
+ results.push({ contractAddress, tokenId: t.tokenId });
+ }
+ }
+ return results;
+}
+
+function parseAddressAndIds(
+ address: string,
+ tokenIds: string[]
+): TokenToFetch[] | null {
+ if (!isValidEthAddress(address)) {
+ return null;
+ }
+ const checksum = normaliseAddress(address);
+ if (!checksum) {
+ return [];
+ }
+ return tokenIds.map((tokenId) => ({ contractAddress: checksum, tokenId }));
+}
+
+function parseRequestBody(body: TokenMetadataRequestBody): ParseResult {
+ const { address, tokenIds, tokens, chain = "ethereum" } = body ?? {};
+
+ if (tokens && tokens.length > 0) {
+ const parsed = parseTokensArray(tokens);
+ if (parsed.length === 0) {
+ return {
+ ok: false,
+ response: NextResponse.json(
+ { error: "No valid contract addresses provided" },
+ { status: 400, headers: NO_STORE_HEADERS }
+ ),
+ };
+ }
+ return { ok: true, tokens: parsed, chain };
+ }
+
+ if (address && Array.isArray(tokenIds) && tokenIds.length > 0) {
+ const parsed = parseAddressAndIds(address, tokenIds);
+ if (parsed === null) {
+ return {
+ ok: false,
+ response: NextResponse.json(
+ { error: "Invalid contract address" },
+ { status: 400, headers: NO_STORE_HEADERS }
+ ),
+ };
+ }
+ if (parsed.length === 0) {
+ return {
+ ok: false,
+ response: NextResponse.json(
+ { error: "Normalization failed for the provided address" },
+ { status: 400, headers: NO_STORE_HEADERS }
+ ),
+ };
+ }
+ return { ok: true, tokens: parsed, chain };
+ }
+
+ return {
+ ok: false,
+ response: NextResponse.json(
+ { error: "Either tokens OR (address and tokenIds) are required" },
+ { status: 400, headers: NO_STORE_HEADERS }
+ ),
+ };
+}
export async function POST(request: NextRequest) {
let body: TokenMetadataRequestBody;
@@ -27,34 +109,48 @@ export async function POST(request: NextRequest) {
);
}
- const { address, tokenIds, tokens, chain = "ethereum" } = body ?? {};
-
- if (
- (!tokens || tokens.length === 0) &&
- (!address || !Array.isArray(tokenIds) || tokenIds.length === 0)
- ) {
- return NextResponse.json(
- { error: "Either tokens OR (address and tokenIds) are required" },
- { status: 400, headers: NO_STORE_HEADERS }
- );
+ const parseResult = parseRequestBody(body);
+ if (!parseResult.ok) {
+ return parseResult.response;
}
+ const { tokens: tokensToFetch, chain } = parseResult;
+
try {
- const metadata = await getTokensMetadata({
- address,
- tokenIds,
- tokens,
- chain,
- signal: request.signal,
- });
- const serializable: SerializableTokenMetadata[] = metadata.map(
- (entry) => ({
- ...entry,
- tokenId: entry.tokenId.toString(),
- })
+ const apiKey = getAlchemyApiKey();
+ const network = resolveNetwork(chain);
+ const url = `https://${network}.g.alchemy.com/nft/v3/${apiKey}/getNFTMetadataBatch`;
+ const allTokens: unknown[] = [];
+
+ for (let i = 0; i < tokensToFetch.length; i += MAX_BATCH_SIZE) {
+ const slice = tokensToFetch.slice(i, i + MAX_BATCH_SIZE);
+ const payload = await fetchPublicJson<{ tokens?: unknown[]; nfts?: unknown[] }>(
+ url,
+ {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Accept: "application/json",
+ },
+ body: JSON.stringify({ tokens: slice }),
+ signal: request.signal,
+ },
+ { timeoutMs: 10000 }
+ );
+ allTokens.push(...(payload.tokens ?? payload.nfts ?? []));
+ }
+
+ return NextResponse.json(
+ { tokens: allTokens },
+ { headers: NO_STORE_HEADERS }
);
- return NextResponse.json(serializable, { headers: NO_STORE_HEADERS });
} catch (error) {
+ if (error instanceof UrlGuardError) {
+ return NextResponse.json(
+ { error: error.message },
+ { status: error.statusCode, headers: NO_STORE_HEADERS }
+ );
+ }
const message =
error instanceof Error ? error.message : "Failed to fetch token metadata";
return NextResponse.json(
diff --git a/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.tsx b/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.tsx
index 9754f6db5b..d50b2e96eb 100644
--- a/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.tsx
+++ b/components/nextGen/collections/collectionParts/mint/NextGenMintBurnWidget.tsx
@@ -1,20 +1,8 @@
"use client";
-import { publicEnv } from "@/config/env";
-import { faInfoCircle } from "@fortawesome/free-solid-svg-icons";
-import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { useEffect, useState } from "react";
-import { Button, Col, Container, Form, Row, Table } from "react-bootstrap";
-import { Tooltip } from "react-tooltip";
-import { useChainId, useWriteContract } from "wagmi";
-import { NextGenCollection } from "@/entities/INextgen";
-import {
- areEqualAddresses,
- getNetworkName,
-} from "@/helpers/Helpers";
-import { fetchUrl } from "@/services/6529api";
import { useSeizeConnectContext } from "@/components/auth/SeizeConnectContext";
import NextGenContractWriteStatus from "@/components/nextGen/NextGenContractWriteStatus";
+import styles from "@/components/nextGen/collections/NextGen.module.scss";
import {
NEXTGEN_CHAIN_ID,
NEXTGEN_CORE,
@@ -30,7 +18,18 @@ import {
getStatusFromDates,
useMintSharedState,
} from "@/components/nextGen/nextgen_helpers";
-import styles from "@/components/nextGen/collections/NextGen.module.scss";
+import { publicEnv } from "@/config/env";
+import { NextGenCollection } from "@/entities/INextgen";
+import { areEqualAddresses, getNetworkName } from "@/helpers/Helpers";
+import { fetchOwnerNfts } from "@/hooks/useAlchemyNftQueries";
+import { fetchUrl } from "@/services/6529api";
+import type { OwnerNft } from "@/services/alchemy/types";
+import { faInfoCircle } from "@fortawesome/free-solid-svg-icons";
+import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
+import { useEffect, useState } from "react";
+import { Button, Col, Container, Form, Row, Table } from "react-bootstrap";
+import { Tooltip } from "react-tooltip";
+import { useChainId, useWriteContract } from "wagmi";
import { Spinner } from "./NextGenMint";
import { NextGenMintingFor } from "./NextGenMintShared";
@@ -80,15 +79,16 @@ export default function NextGenMintBurnWidget(props: Readonly) {
const [tokensOwnedForBurnAddressLoaded, setTokensOwnedForBurnAddressLoaded] =
useState(false);
const [tokensOwnedForBurnAddress, setTokensOwnedForBurnAddress] = useState<
- any[]
+ OwnerNft[]
>([]);
- function filterTokensOwnedForBurnAddress(r: any[]) {
+ function filterTokensOwnedForBurnAddress(r: OwnerNft[]) {
if (props.collection_merkle.max_token_index > 0) {
r = r.filter((t) => {
+ const tokenIdNum = Number(t.tokenId);
return (
- t.tokenId >= props.collection_merkle.min_token_index &&
- t.tokenId <= props.collection_merkle.max_token_index
+ tokenIdNum >= props.collection_merkle.min_token_index &&
+ tokenIdNum <= props.collection_merkle.max_token_index
);
});
}
@@ -99,9 +99,7 @@ export default function NextGenMintBurnWidget(props: Readonly) {
)
) {
r = r.filter((t) =>
- t.tokenId
- .toString()
- .startsWith(props.collection_merkle.burn_collection_id)
+ t.tokenId.startsWith(String(props.collection_merkle.burn_collection_id))
);
}
return r;
@@ -113,21 +111,13 @@ export default function NextGenMintBurnWidget(props: Readonly) {
return;
}
const controller = new AbortController();
- const searchParams = new URLSearchParams({
- chainId: String(NEXTGEN_CHAIN_ID),
- contract: NEXTGEN_CORE[NEXTGEN_CHAIN_ID],
- owner: burnAddress,
- });
- fetch(`/api/alchemy/owner-nfts?${searchParams.toString()}`, {
- signal: controller.signal,
- })
- .then(async (response) => {
- if (!response.ok) {
- throw new Error("Failed to fetch NFTs for owner");
- }
- return (await response.json()) as any[];
- })
+ fetchOwnerNfts(
+ NEXTGEN_CHAIN_ID,
+ NEXTGEN_CORE[NEXTGEN_CHAIN_ID],
+ burnAddress,
+ controller.signal
+ )
.then((r) => {
setTokensOwnedForBurnAddressLoaded(true);
const filteredTokens = filterTokensOwnedForBurnAddress(r);
diff --git a/hooks/useAlchemyNftQueries.ts b/hooks/useAlchemyNftQueries.ts
index 1cd4cc98d4..145cb13686 100644
--- a/hooks/useAlchemyNftQueries.ts
+++ b/hooks/useAlchemyNftQueries.ts
@@ -1,18 +1,30 @@
"use client";
-import { useEffect, useMemo } from "react";
import {
keepPreviousData,
useQuery,
useQueryClient,
} from "@tanstack/react-query";
+import { useEffect, useMemo } from "react";
import { QueryKey } from "@/components/react-query-wrapper/ReactQueryWrapper";
+import { publicEnv } from "@/config/env";
import { useDebouncedValue } from "@/hooks/useDebouncedValue";
import type {
+ AlchemyContractMetadataResponse,
+ AlchemyGetNftsForOwnerResponse,
+ AlchemySearchResponse,
+ AlchemyTokenMetadataResponse,
+ OwnerNft,
SearchContractsResult,
- TokenMetadataParams,
} from "@/services/alchemy/types";
+import {
+ normaliseAddress,
+ processContractMetadataResponse,
+ processOwnerNftsResponse,
+ processSearchResponse,
+ processTokenMetadataResponse,
+} from "@/services/alchemy/utils";
import type {
ContractOverview,
Suggestion,
@@ -33,11 +45,24 @@ const suggestionCache = new Map>();
const contractCache = new Map>();
const tokenCache = new Map>();
-type SerializedTokenMetadata = Omit & {
- tokenId: string;
-};
+function getBackendAlchemyProxyUrl(path: string): string {
+ return `${publicEnv.API_ENDPOINT}/alchemy-proxy${path}`;
+}
-async function fetchJson(input: RequestInfo, init?: RequestInit): Promise {
+function isAbortError(error: unknown): boolean {
+ if (error instanceof DOMException && error.name === "AbortError") {
+ return true;
+ }
+ if (error instanceof Error && error.name === "AbortError") {
+ return true;
+ }
+ return false;
+}
+
+async function fetchJson(
+ input: RequestInfo,
+ init?: RequestInit
+): Promise {
const response = await fetch(input, init);
if (!response.ok) {
throw new Error(`Request failed with status ${response.status}`);
@@ -45,6 +70,33 @@ async function fetchJson(input: RequestInfo, init?: RequestInit): Promise
return (await response.json()) as T;
}
+async function fetchJsonWithFailover(
+ primaryUrl: string,
+ backendPath: string,
+ init?: RequestInit
+): Promise {
+ try {
+ return await fetchJson(primaryUrl, init);
+ } catch (error) {
+ if (isAbortError(error)) {
+ throw error;
+ }
+ const backendUrl = getBackendAlchemyProxyUrl(backendPath);
+ console.warn(
+      `Failed to fetch from primary endpoint (${primaryUrl}), falling back to proxy endpoint (${backendUrl})`
+ );
+ return fetchJson(backendUrl, init);
+ }
+}
+
+type TokenMetadataParams = {
+ readonly address?: `0x${string}`;
+ readonly tokenIds?: readonly string[];
+ readonly tokens?: readonly { contract: string; tokenId: string }[];
+ readonly chain?: SupportedChain;
+ readonly signal?: AbortSignal;
+};
+
async function fetchCollectionsFromApi(
params: UseCollectionSearchParams & { readonly signal?: AbortSignal }
): Promise {
@@ -52,11 +104,15 @@ async function fetchCollectionsFromApi(
const search = new URLSearchParams();
search.set("query", query);
search.set("chain", chain);
- search.set("hideSpam", hideSpam ? "1" : "0");
- return fetchJson(
- `/api/alchemy/collections?${search.toString()}`,
+ const queryString = search.toString();
+
+ const payload = await fetchJsonWithFailover(
+ `/api/alchemy/collections?${queryString}`,
+ `/collections?${queryString}`,
{ signal }
);
+
+ return processSearchResponse(payload, hideSpam);
}
async function fetchContractOverviewFromApi(
@@ -66,39 +122,54 @@ async function fetchContractOverviewFromApi(
if (!address) {
return null;
}
+
+ const checksum = normaliseAddress(address);
+ if (!checksum) {
+ return null;
+ }
+
const search = new URLSearchParams();
search.set("address", address);
search.set("chain", chain);
- return fetchJson(
- `/api/alchemy/contract?${search.toString()}`,
- { signal }
- );
+ const queryString = search.toString();
+
+ const payload = await fetchJsonWithFailover<
+ (AlchemyContractMetadataResponse & { _checksum?: string }) | null
+ >(`/api/alchemy/contract?${queryString}`, `/contract?${queryString}`, {
+ signal,
+ });
+
+ if (!payload) {
+ return null;
+ }
+
+ const checksumFromResponse = (payload._checksum ?? checksum) as `0x${string}`;
+ return processContractMetadataResponse(payload, checksumFromResponse);
}
async function fetchTokenMetadataFromApi(
params: TokenMetadataParams
): Promise {
- const response = await fetch("/api/alchemy/token-metadata", {
+ const body = JSON.stringify({
+ address: params.address,
+ tokenIds: params.tokenIds,
+ tokens: params.tokens,
+ chain: params.chain ?? "ethereum",
+ });
+ const init: RequestInit = {
method: "POST",
- headers: {
- "Content-Type": "application/json",
- },
- body: JSON.stringify({
- address: params.address,
- tokenIds: params.tokenIds,
- tokens: params.tokens,
- chain: params.chain ?? "ethereum",
- }),
+ headers: { "Content-Type": "application/json" },
+ body,
signal: params.signal,
- });
- if (!response.ok) {
- throw new Error(`Request failed with status ${response.status}`);
- }
- const payload = (await response.json()) as SerializedTokenMetadata[];
- return payload.map((entry) => ({
- ...entry,
- tokenId: BigInt(entry.tokenId),
- }));
+ };
+
+ const payload = await fetchJsonWithFailover(
+ "/api/alchemy/token-metadata",
+ "/token-metadata",
+ init
+ );
+
+ return processTokenMetadataResponse(payload);
}
function gcExpired(map: Map>, now = Date.now()): void {
@@ -151,7 +222,9 @@ function getTokenCacheKey(params: TokenMetadataParams): string {
}
return 0;
});
- return `${params.chain ?? "ethereum"}:${address.toLowerCase()}:${ids.join("|")}`;
+ return `${params.chain ?? "ethereum"}:${address.toLowerCase()}:${ids.join(
+ "|"
+ )}`;
}
type UseCollectionSearchParams = {
@@ -191,11 +264,7 @@ export function useCollectionSearch({
staleTime: SUGGESTION_TTL,
gcTime: SUGGESTION_TTL,
queryFn: async ({ signal }) => {
- const cacheKey = getSuggestionCacheKey(
- debouncedQuery,
- chain,
- hideSpam
- );
+ const cacheKey = getSuggestionCacheKey(debouncedQuery, chain, hideSpam);
const now = Date.now();
gcExpired(suggestionCache, now);
const cached = suggestionCache.get(cacheKey);
@@ -361,3 +430,24 @@ export function primeContractCache(
expires: Date.now() + CONTRACT_TTL,
});
}
+
+export async function fetchOwnerNfts(
+ chainId: number,
+ contract: string,
+ owner: string,
+ signal?: AbortSignal
+): Promise {
+ const search = new URLSearchParams();
+ search.set("chainId", String(chainId));
+ search.set("contract", contract);
+ search.set("owner", owner);
+ const queryString = search.toString();
+
+ const payload = await fetchJsonWithFailover(
+ `/api/alchemy/owner-nfts?${queryString}`,
+ `/owner-nfts?${queryString}`,
+ { signal }
+ );
+
+ return processOwnerNftsResponse(payload.ownedNfts ?? []);
+}
diff --git a/jest.setup.js b/jest.setup.js
index 671f5bbf3f..73930cc36b 100644
--- a/jest.setup.js
+++ b/jest.setup.js
@@ -9,32 +9,12 @@ globalThis.TextDecoder = TextDecoder;
require("@testing-library/jest-dom");
-// Mock CSS parsing for react-bootstrap and other CSS-dependent components
-Object.defineProperty(window, "getComputedStyle", {
- value: () => ({
- getPropertyValue: (prop) => {
- if (prop === "transition-duration" || prop === "animation-duration") {
- return "0s";
- }
- return "";
- },
- transitionDuration: "0s",
- animationDuration: "0s",
- }),
-});
-
// Mock CSS module imports
globalThis.CSS = {
supports: () => false,
escape: (str) => str,
};
-// Mock DOM methods that Bootstrap modals might use
-Object.defineProperty(window, "scrollTo", {
- value: () => {},
- writable: true,
-});
-
// Mock CSS functions that dom-helpers/css might use
globalThis.css = (element, property, value) => {
if (arguments.length === 3) {
@@ -52,20 +32,43 @@ globalThis.css = (element, property, value) => {
return "";
};
-// Mock matchMedia for device detection
-Object.defineProperty(window, "matchMedia", {
- writable: true,
- value: jest.fn().mockImplementation((query) => ({
- matches: false,
- media: query,
- onchange: null,
- addListener: jest.fn(), // deprecated
- removeListener: jest.fn(), // deprecated
- addEventListener: jest.fn(),
- removeEventListener: jest.fn(),
- dispatchEvent: jest.fn(),
- })),
-});
+// Only set up window mocks in jsdom environment
+if (globalThis.window !== undefined) {
+ // Mock CSS parsing for react-bootstrap and other CSS-dependent components
+ Object.defineProperty(window, "getComputedStyle", {
+ value: () => ({
+ getPropertyValue: (prop) => {
+ if (prop === "transition-duration" || prop === "animation-duration") {
+ return "0s";
+ }
+ return "";
+ },
+ transitionDuration: "0s",
+ animationDuration: "0s",
+ }),
+ });
+
+ // Mock DOM methods that Bootstrap modals might use
+ Object.defineProperty(window, "scrollTo", {
+ value: () => {},
+ writable: true,
+ });
+
+ // Mock matchMedia for device detection
+ Object.defineProperty(window, "matchMedia", {
+ writable: true,
+ value: jest.fn().mockImplementation((query) => ({
+ matches: false,
+ media: query,
+ onchange: null,
+      addListener: jest.fn(), // deprecated
+      removeListener: jest.fn(), // deprecated
+ addEventListener: jest.fn(),
+ removeEventListener: jest.fn(),
+ dispatchEvent: jest.fn(),
+ })),
+ });
+}
/**
* Provide a sane default PUBLIC_RUNTIME blob for tests that indirectly import config/env.
@@ -100,11 +103,11 @@ globalThis.ResizeObserver = class ResizeObserver {
constructor(callback) {
this.callback = callback;
}
-
+
observe() {} // Intentionally empty - no actual observation needed in tests
-
+
unobserve() {} // Intentionally empty - no actual observation needed in tests
-
+
disconnect() {} // Intentionally empty - no actual observation needed in tests
};
diff --git a/services/6529api.ts b/services/6529api.ts
index f0ce9ad695..4b51fe591b 100644
--- a/services/6529api.ts
+++ b/services/6529api.ts
@@ -16,6 +16,7 @@ function buildAuthHeaders(init?: HeadersInit): Headers {
function handleResponseError(res: Response): void {
if (res.status === 401) {
Cookies.remove(API_AUTH_COOKIE);
+ return;
}
if (!res.ok) {
throw new Error(`HTTP error! status: ${res.status}`);
diff --git a/services/alchemy/collections.ts b/services/alchemy/collections.ts
index 3f03fc39b8..df986d031a 100644
--- a/services/alchemy/collections.ts
+++ b/services/alchemy/collections.ts
@@ -1,9 +1,8 @@
import { isValidEthAddress } from "@/helpers/Helpers";
-import type { ContractOverview, Suggestion } from "@/types/nft";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
+import type { ContractOverview } from "@/types/nft";
import type {
- AlchemyContractResult,
- AlchemyContractMetadata,
AlchemyContractMetadataResponse,
AlchemySearchResponse,
ContractOverviewParams,
@@ -11,18 +10,23 @@ import type {
SearchContractsResult,
} from "./types";
import {
- extractContract,
ensureQuery,
normaliseAddress,
+ processContractMetadataResponse,
+ processSearchResponse,
resolveNetwork,
- resolveOpenSeaMetadata,
} from "./utils";
-import { getAlchemyApiKey } from "@/config/alchemyEnv";
export async function searchNftCollections(
params: SearchContractsParams
): Promise {
- const { query, chain = "ethereum", pageKey, hideSpam = true, signal } = params;
+ const {
+ query,
+ chain = "ethereum",
+ pageKey,
+ hideSpam = true,
+ signal,
+ } = params;
const trimmed = ensureQuery(query);
const network = resolveNetwork(chain);
const apiKey = getAlchemyApiKey();
@@ -42,24 +46,8 @@ export async function searchNftCollections(
if (!response.ok) {
throw new Error("Failed to search NFT collections");
}
- const payload = (await response.json()) as
- | AlchemySearchResponse
- | undefined;
- const contracts = payload?.contracts ?? [];
- const suggestions = contracts
- .map((contract) => extractContract(contract))
- .filter((suggestion): suggestion is Suggestion => suggestion !== null);
- const hiddenCount = hideSpam
- ? suggestions.filter((suggestion) => suggestion.isSpam).length
- : 0;
- const visibleItems = hideSpam
- ? suggestions.filter((suggestion) => !suggestion.isSpam)
- : suggestions;
- return {
- items: visibleItems,
- hiddenCount,
- nextPageKey: payload?.pageKey,
- };
+ const payload = (await response.json()) as AlchemySearchResponse | undefined;
+ return processSearchResponse(payload, hideSpam);
}
export async function getContractOverview(
@@ -91,33 +79,5 @@ export async function getContractOverview(
const payload = (await response.json()) as
| AlchemyContractMetadataResponse
| undefined;
- if (!payload) {
- return null;
- }
- const baseMeta: AlchemyContractMetadata =
- payload.contractMetadata ?? payload;
- const openSeaMetadata = resolveOpenSeaMetadata(
- payload,
- payload.contractMetadata,
- baseMeta
- );
- const contract: AlchemyContractResult = {
- ...baseMeta,
- contractMetadata: baseMeta,
- address: checksum,
- contractAddress: checksum,
- openSeaMetadata,
- isSpam:
- payload.isSpam ?? baseMeta.isSpam ?? baseMeta.spamInfo?.isSpam,
- };
- const suggestion = extractContract(contract);
- if (!suggestion) {
- return null;
- }
- const openSea = openSeaMetadata;
- return {
- ...suggestion,
- description: openSea?.description ?? null,
- bannerImageUrl: openSea?.bannerImageUrl ?? null,
- };
+ return processContractMetadataResponse(payload, checksum);
}
diff --git a/services/alchemy/owner-nfts.ts b/services/alchemy/owner-nfts.ts
index 5083c9400c..917b23af0e 100644
--- a/services/alchemy/owner-nfts.ts
+++ b/services/alchemy/owner-nfts.ts
@@ -1,13 +1,18 @@
import { goerli, sepolia } from "wagmi/chains";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
import type {
AlchemyGetNftsForOwnerResponse,
AlchemyOwnedNft,
+ OwnerNft,
} from "./types";
-import { getAlchemyApiKey } from "@/config/alchemyEnv";
+import { processOwnerNftsResponse } from "./utils";
const MAX_GET_NFTS_RETRIES = 3;
-const legacyOptions = { method: "GET", headers: { accept: "application/json" } };
+const legacyOptions = {
+ method: "GET",
+ headers: { accept: "application/json" },
+};
function createAbortError(signal: AbortSignal): Error {
if (signal.reason instanceof Error) {
@@ -51,7 +56,10 @@ function delayWithAbort(ms: number, signal?: AbortSignal): Promise {
});
}
-async function fetchLegacyUrl(url: string, signal?: AbortSignal): Promise {
+async function fetchLegacyUrl(
+ url: string,
+ signal?: AbortSignal
+): Promise {
const response = await fetch(url, { ...legacyOptions, signal });
return (await response.json()) as T;
}
@@ -64,7 +72,7 @@ export async function getNftsForContractAndOwner(
pageKey?: string,
retries = 0,
signal?: AbortSignal
-) {
+): Promise {
if (!contract || !owner) {
throw new Error("Contract and owner are required");
}
@@ -111,14 +119,5 @@ export async function getNftsForContractAndOwner(
nextPageKey = response.pageKey;
}
- const allNfts = ownedNfts.map((nft) => {
- return {
- tokenId: nft.tokenId,
- tokenType: nft.tokenType,
- name: nft.name,
- tokenUri: nft.tokenUri,
- image: nft.image,
- };
- });
- return allNfts;
+ return processOwnerNftsResponse(ownedNfts);
}
diff --git a/services/alchemy/tokens.ts b/services/alchemy/tokens.ts
index 5e1405ef2f..6caed8690e 100644
--- a/services/alchemy/tokens.ts
+++ b/services/alchemy/tokens.ts
@@ -1,61 +1,19 @@
import { isValidEthAddress } from "@/helpers/Helpers";
import type { TokenMetadata } from "@/types/nft";
+import { getAlchemyApiKey } from "@/config/alchemyEnv";
import type {
- AlchemyTokenMetadataEntry,
AlchemyTokenMetadataResponse,
TokenMetadataParams,
} from "./types";
-import { normaliseAddress, pickThumbnail, resolveNetwork } from "./utils";
-import { getAlchemyApiKey } from "@/config/alchemyEnv";
+import {
+ normaliseAddress,
+ processTokenMetadataResponse,
+ resolveNetwork,
+} from "./utils";
const MAX_BATCH_SIZE = 100;
-function parseTokenIdToBigint(tokenId: string): bigint {
- if (!tokenId) {
- throw new Error("Token ID missing");
- }
- const trimmed = tokenId.trim();
- if (trimmed.startsWith("0x") || trimmed.startsWith("0X")) {
- return BigInt(trimmed);
- }
- return BigInt(trimmed);
-}
-
-function normaliseTokenMetadata(
- token: AlchemyTokenMetadataEntry
-): TokenMetadata | null {
- const tokenIdRaw = token.tokenId ?? "";
- try {
- const tokenId = parseTokenIdToBigint(tokenIdRaw);
- const imageUrl = pickThumbnail({
- image: token.image ?? undefined,
- media: token.media ?? undefined,
- });
- return {
- tokenId,
- tokenIdRaw,
- contract: token.contract?.address ?? undefined,
- name:
- token.title ??
- token.name ??
- token.metadata?.name ??
- token.raw?.metadata?.name ??
- null,
- imageUrl,
- collectionName:
- token.collection?.name ??
- token.contract?.openSeaMetadata?.collectionName ??
- token.contract?.name ??
- null,
- isSpam: token.isSpam ?? token.spamInfo?.isSpam ?? false,
- };
- } catch (error) {
- console.warn("Failed to parse token metadata", tokenIdRaw, error);
- return null;
- }
-}
-
export async function getTokensMetadata(
params: TokenMetadataParams
): Promise {
@@ -116,13 +74,8 @@ export async function getTokensMetadata(
throw new Error("Failed to fetch token metadata");
}
const payload = (await response.json()) as AlchemyTokenMetadataResponse;
- const tokens = payload.tokens ?? payload.nfts ?? [];
- for (const token of tokens) {
- const normalised = normaliseTokenMetadata(token);
- if (normalised) {
- results.push(normalised);
- }
- }
+ const batchResults = processTokenMetadataResponse(payload);
+ results.push(...batchResults);
}
return results;
}
diff --git a/services/alchemy/types.ts b/services/alchemy/types.ts
index f25ec08b33..d764c9dcf6 100644
--- a/services/alchemy/types.ts
+++ b/services/alchemy/types.ts
@@ -1,7 +1,4 @@
-import type {
- Suggestion,
- SupportedChain,
-} from "@/types/nft";
+import type { Suggestion, SupportedChain } from "@/types/nft";
export type SearchContractsParams = {
query: string;
@@ -123,7 +120,9 @@ export type AlchemyOwnedNftAcquiredAt = {
};
export type AlchemyTokenMetadataEntry = {
- contract?: (AlchemyContractMetadata & { spamClassifications?: string[] | null }) | null;
+ contract?:
+ | (AlchemyContractMetadata & { spamClassifications?: string[] | null })
+ | null;
tokenId?: string;
tokenType?: string | null;
title?: string | null;
@@ -158,6 +157,14 @@ export type AlchemyOwnedNft = AlchemyTokenMetadataEntry & {
balance?: string | null;
};
+export type OwnerNft = {
+ tokenId: string;
+ tokenType: string | null;
+ name: string | null;
+ tokenUri: string | null;
+ image: AlchemyNftMedia | null;
+};
+
export type AlchemyGetNftsForOwnerResponse = {
ownedNfts: AlchemyOwnedNft[];
pageKey?: string;
diff --git a/services/alchemy/utils.ts b/services/alchemy/utils.ts
index 5c2df88dec..74aa258c49 100644
--- a/services/alchemy/utils.ts
+++ b/services/alchemy/utils.ts
@@ -2,10 +2,18 @@ import { getAddress } from "viem";
import { isValidEthAddress } from "@/helpers/Helpers";
import type {
+ AlchemyContractMetadata,
+ AlchemyContractMetadataResponse,
AlchemyContractResult,
AlchemyOpenSeaMetadata,
+ AlchemyOwnedNft,
+ AlchemySearchResponse,
+ AlchemyTokenMetadataEntry,
+ AlchemyTokenMetadataResponse,
+ OwnerNft,
+ SearchContractsResult,
} from "./types";
-import type { Suggestion, SupportedChain } from "@/types/nft";
+import type { ContractOverview, Suggestion, SupportedChain, TokenMetadata } from "@/types/nft";
const NETWORK_MAP: Record = {
ethereum: "eth-mainnet",
@@ -205,3 +213,117 @@ export function extractContract(
deployer: deployer ?? null,
};
}
+
+export function processSearchResponse(
+ payload: AlchemySearchResponse | undefined,
+ hideSpam: boolean
+): SearchContractsResult {
+ const contracts = payload?.contracts ?? [];
+ const suggestions = contracts
+ .map((contract) => extractContract(contract))
+ .filter((suggestion): suggestion is Suggestion => suggestion !== null);
+ const hiddenCount = hideSpam
+ ? suggestions.filter((suggestion) => suggestion.isSpam).length
+ : 0;
+ const visibleItems = hideSpam
+ ? suggestions.filter((suggestion) => !suggestion.isSpam)
+ : suggestions;
+ return {
+ items: visibleItems,
+ hiddenCount,
+ nextPageKey: payload?.pageKey,
+ };
+}
+
+export function processContractMetadataResponse(
+ payload: AlchemyContractMetadataResponse | undefined,
+ checksum: `0x${string}`
+): ContractOverview | null {
+ if (!payload) {
+ return null;
+ }
+ const baseMeta: AlchemyContractMetadata = payload.contractMetadata ?? payload;
+ const openSeaMetadata = resolveOpenSeaMetadata(
+ payload,
+ payload.contractMetadata,
+ baseMeta
+ );
+ const contract: AlchemyContractResult = {
+ ...baseMeta,
+ contractMetadata: baseMeta,
+ address: checksum,
+ contractAddress: checksum,
+ openSeaMetadata,
+ isSpam: payload.isSpam ?? baseMeta.isSpam ?? baseMeta.spamInfo?.isSpam,
+ };
+ const suggestion = extractContract(contract);
+ if (!suggestion) {
+ return null;
+ }
+ return {
+ ...suggestion,
+ description: openSeaMetadata?.description ?? null,
+ bannerImageUrl: openSeaMetadata?.bannerImageUrl ?? null,
+ };
+}
+
+export function processOwnerNftsResponse(ownedNfts: AlchemyOwnedNft[]): OwnerNft[] {
+ return ownedNfts.map((nft) => ({
+ tokenId: nft.tokenId ?? "",
+ tokenType: nft.tokenType ?? null,
+ name: nft.name ?? null,
+ tokenUri: nft.tokenUri ?? null,
+ image: nft.image ?? null,
+ }));
+}
+
+function parseTokenIdToBigint(tokenId: string): bigint {
+ if (!tokenId) {
+ throw new Error("Token ID missing");
+ }
+ const trimmed = tokenId.trim();
+ return BigInt(trimmed);
+}
+
+function normaliseTokenMetadata(token: AlchemyTokenMetadataEntry): TokenMetadata | null {
+ const tokenIdRaw = token.tokenId ?? "";
+ try {
+ const tokenId = parseTokenIdToBigint(tokenIdRaw);
+ const imageUrl = pickThumbnail({
+ image: token.image ?? undefined,
+ media: token.media ?? undefined,
+ });
+ return {
+ tokenId,
+ tokenIdRaw,
+ contract: token.contract?.address ?? undefined,
+ name:
+ token.title ??
+ token.name ??
+ token.metadata?.name ??
+ token.raw?.metadata?.name ??
+ null,
+ imageUrl,
+ collectionName:
+ token.collection?.name ??
+ token.contract?.openSeaMetadata?.collectionName ??
+ token.contract?.name ??
+ null,
+ isSpam: token.isSpam ?? token.spamInfo?.isSpam ?? false,
+ };
+ } catch {
+ return null;
+ }
+}
+
+export function processTokenMetadataResponse(payload: AlchemyTokenMetadataResponse): TokenMetadata[] {
+ const tokens = payload.tokens ?? payload.nfts ?? [];
+ const results: TokenMetadata[] = [];
+ for (const token of tokens) {
+ const normalised = normaliseTokenMetadata(token);
+ if (normalised) {
+ results.push(normalised);
+ }
+ }
+ return results;
+}