Merged
2 changes: 1 addition & 1 deletion .github/workflows/build-test.yml
@@ -33,7 +33,7 @@ jobs:

strategy:
matrix:
- node-version: [20.x]
+ node-version: [22.x]
provider: [sqlite, postgresql]

steps:
2 changes: 1 addition & 1 deletion .github/workflows/bump-version.yml
@@ -26,7 +26,7 @@ jobs:
- name: Use Node.js
uses: actions/setup-node@v4
with:
- node-version: 20.x
+ node-version: 22.x
cache: 'pnpm'

- name: Install dependencies
2 changes: 1 addition & 1 deletion .github/workflows/publish-release.yml
@@ -28,7 +28,7 @@ jobs:
- name: Use Node.js
uses: actions/setup-node@v4
with:
- node-version: 20.x
+ node-version: 22.x
cache: 'pnpm'
registry-url: 'https://registry.npmjs.org'

2 changes: 1 addition & 1 deletion .github/workflows/update-samples.yml
@@ -65,7 +65,7 @@ jobs:
if: steps.check-package.outputs.exists == 'true'
uses: actions/setup-node@v4
with:
- node-version: 20.x
+ node-version: 22.x
cache: 'npm'

- name: Update @zenstackhq packages to latest
1 change: 1 addition & 0 deletions .gitignore
@@ -9,3 +9,4 @@ dist
.pnpm-store
*.vsix
.DS_Store
+ coverage
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -6,7 +6,7 @@ Before you start working on anything major, please make sure to open an issue or

## Prerequisites

- - Node.js: v20 or above
+ - Node.js: v22 or above
- PNPM: as specified in [package.json](./package.json)

Test cases are run against both SQLite and Postgres. You should have a postgres server (16 or above) running (either natively or via Docker). The default connection is:
8 changes: 7 additions & 1 deletion package.json
@@ -12,6 +12,7 @@
"test:all": "pnpm run test:sqlite && pnpm run test:pg",
"test:pg": "TEST_DB_PROVIDER=postgresql turbo run test",
"test:sqlite": "TEST_DB_PROVIDER=sqlite turbo run test",
"test:coverage": "vitest run --coverage",
"format": "prettier --write \"**/*.{ts,tsx,md}\"",
"pr": "gh pr create --fill-first --base dev",
"merge-main": "gh pr create --title \"merge dev to main\" --body \"\" --base main --head dev",
@@ -26,8 +27,10 @@
"devDependencies": {
"@eslint/js": "^9.29.0",
"@types/node": "catalog:",
"@vitest/coverage-v8": "^4.0.16",
"eslint": "~9.29.0",
"glob": "^11.1.0",
"npm-run-all": "^4.1.5",
"prettier": "^3.5.3",
"prisma": "catalog:",
"tsup": "^8.5.0",
@@ -40,7 +43,10 @@
},
"pnpm": {
"onlyBuiltDependencies": [
"better-sqlite3"
"@parcel/watcher",
"better-sqlite3",
"esbuild",
"vue-demi"
]
}
}
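The new test:coverage script pairs with the @vitest/coverage-v8 dev dependency and the coverage entry added to .gitignore. The shared vitest configuration itself is not part of this diff, so the following is only a minimal sketch of what a v8 coverage setup looks like in a vitest config (the file name and reporter choices here are assumptions):

// vitest.config.ts — illustrative sketch only; the real shared config lives in @zenstackhq/vitest-config
import { defineConfig } from 'vitest/config';

export default defineConfig({
    test: {
        coverage: {
            provider: 'v8', // backed by @vitest/coverage-v8
            reporter: ['text', 'html'],
            reportsDirectory: './coverage', // matches the new .gitignore entry
        },
    },
});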
3 changes: 3 additions & 0 deletions packages/cli/package.json
@@ -28,6 +28,9 @@
"test": "vitest run",
"pack": "pnpm pack"
},
"exports": {
"./package.json": "./package.json"
},
"dependencies": {
"@zenstackhq/common-helpers": "workspace:*",
"@zenstackhq/language": "workspace:*",
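Exposing ./package.json in the export map lets tooling resolve the CLI's manifest via the package name. A hypothetical consumer (not part of this PR) could read the CLI's version like this:

// Hypothetical: reading the CLI's version through the newly exported ./package.json subpath.
import { createRequire } from 'node:module';

const require = createRequire(import.meta.url);
const cliPkg = require('@zenstackhq/cli/package.json');
console.log(`zenstack CLI version: ${cliPkg.version}`);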
4 changes: 4 additions & 0 deletions packages/clients/client-helpers/eslint.config.js
@@ -0,0 +1,4 @@
import config from '@zenstackhq/eslint-config/base.js';

/** @type {import("eslint").Linter.Config} */
export default config;
40 changes: 40 additions & 0 deletions packages/clients/client-helpers/package.json
@@ -0,0 +1,40 @@
{
"name": "@zenstackhq/client-helpers",
"version": "3.0.0",
"description": "Helpers for implementing clients that consume ZenStack's CRUD service",
"type": "module",
"scripts": {
"build": "tsc --noEmit && tsup-node && pnpm test:typecheck",
"watch": "tsup-node --watch",
"lint": "eslint src --ext ts",
"test": "vitest run",
"test:typecheck": "tsc --noEmit --project tsconfig.test.json",
"pack": "pnpm pack"
},
"author": "ZenStack Team",
"license": "MIT",
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./fetch": {
"types": "./dist/fetch.d.ts",
"default": "./dist/fetch.js"
}
},
"dependencies": {
"@zenstackhq/common-helpers": "workspace:*",
"@zenstackhq/schema": "workspace:*",
"decimal.js": "catalog:",
"superjson": "^2.2.3"
},
"devDependencies": {
"@zenstackhq/eslint-config": "workspace:*",
"@zenstackhq/language": "workspace:*",
"@zenstackhq/orm": "workspace:*",
"@zenstackhq/sdk": "workspace:*",
"@zenstackhq/typescript-config": "workspace:*",
"@zenstackhq/vitest-config": "workspace:*"
}
}
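The package declares a root entry plus a ./fetch subpath, both built to dist/. A consumer would import along these lines (a sketch; the exported names come from the source files shown below):

// Sketch of consuming the two export entries declared above.
import { DEFAULT_QUERY_ENDPOINT } from '@zenstackhq/client-helpers';
import { fetcher, makeUrl } from '@zenstackhq/client-helpers/fetch';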
4 changes: 4 additions & 0 deletions packages/clients/client-helpers/src/constants.ts
@@ -0,0 +1,4 @@
/**
* The default query endpoint.
*/
export const DEFAULT_QUERY_ENDPOINT = '/api/model';
107 changes: 107 additions & 0 deletions packages/clients/client-helpers/src/fetch.ts
@@ -0,0 +1,107 @@
import { lowerCaseFirst } from '@zenstackhq/common-helpers';
import Decimal from 'decimal.js';
import SuperJSON from 'superjson';
import type { QueryError } from './types';

/**
* Function signature for `fetch`.
*/
export type FetchFn = (url: string, options?: RequestInit) => Promise<Response>;

/**
* A fetcher function that uses the fetch API to make HTTP requests and automatically unmarshals
* the response using superjson.
*/
export async function fetcher<R>(url: string, options?: RequestInit, customFetch?: FetchFn): Promise<R> {
const _fetch = customFetch ?? fetch;
const res = await _fetch(url, options);
if (!res.ok) {
const errData = unmarshal(await res.text());
if (errData.error?.rejectedByPolicy && errData.error?.rejectReason === 'cannot-read-back') {
// policy doesn't allow mutation result to be read back, just return undefined
return undefined as any;
}
const error: QueryError = new Error('An error occurred while fetching the data.');
error.info = errData.error;
error.status = res.status;
throw error;
}

const textResult = await res.text();
try {
return unmarshal(textResult).data as R;
} catch (err) {
console.error(`Unable to deserialize data:`, textResult);
throw err;
}
}

/**
* Makes a URL for the given endpoint, model, operation, and args that matches the RPC-style server API.
*/
export function makeUrl(endpoint: string, model: string, operation: string, args?: unknown) {
const baseUrl = `${endpoint}/${lowerCaseFirst(model)}/${operation}`;
if (!args) {
return baseUrl;
}

const { data, meta } = serialize(args);
let result = `${baseUrl}?q=${encodeURIComponent(JSON.stringify(data))}`;
if (meta) {
result += `&meta=${encodeURIComponent(JSON.stringify({ serialization: meta }))}`;
}
return result;
}

SuperJSON.registerCustom<Decimal, string>(
{
isApplicable: (v): v is Decimal =>
v instanceof Decimal ||
// interop with decimal.js
v?.toStringTag === '[object Decimal]',
serialize: (v) => v.toJSON(),
deserialize: (v) => new Decimal(v),
},
'Decimal',
);

/**
* Serialize the given value with superjson
*/
export function serialize(value: unknown): { data: unknown; meta: unknown } {
const { json, meta } = SuperJSON.serialize(value);
return { data: json, meta };
}

/**
* Deserialize the given value with superjson using the given metadata
*/
export function deserialize(value: unknown, meta: any): unknown {
return SuperJSON.deserialize({ json: value as any, meta });
}

/**
* Marshal the given value to a string using superjson
*/
export function marshal(value: unknown) {
const { data, meta } = serialize(value);
if (meta) {
return JSON.stringify({ ...(data as any), meta: { serialization: meta } });
} else {
return JSON.stringify(data);
}
}

/**
* Unmarshal the given string value using superjson, assuming the value is a JSON stringified
* object containing the serialized data and serialization metadata.
*/
export function unmarshal(value: string) {
const parsed = JSON.parse(value);
if (typeof parsed === 'object' && parsed?.data && parsed?.meta?.serialization) {
const deserializedData = deserialize(parsed.data, parsed.meta.serialization);
return { ...parsed, data: deserializedData };
} else {
return parsed;
}
}
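Together, makeUrl and fetcher cover the request/response handling for the RPC-style API. A minimal usage sketch, assuming a server mounted at the default /api/model endpoint and a Post model (both are assumptions for illustration):

// Sketch: issuing a findMany query against an RPC-style ZenStack endpoint (model name assumed).
import { DEFAULT_QUERY_ENDPOINT } from '@zenstackhq/client-helpers';
import { fetcher, makeUrl } from '@zenstackhq/client-helpers/fetch';

async function loadPublishedPosts() {
    // query args are superjson-serialized into the `q` (and optional `meta`) query params
    const url = makeUrl(DEFAULT_QUERY_ENDPOINT, 'Post', 'findMany', { where: { published: true } });
    // fetcher unmarshals the superjson payload and throws a QueryError on non-2xx responses
    return fetcher<unknown[]>(url);
}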
9 changes: 9 additions & 0 deletions packages/clients/client-helpers/src/index.ts
@@ -0,0 +1,9 @@
export * from './constants';
export * from './invalidation';
export * from './logging';
export * from './mutator';
export * from './nested-read-visitor';
export * from './nested-write-visitor';
export * from './optimistic';
export * from './query-analysis';
export * from './types';
89 changes: 89 additions & 0 deletions packages/clients/client-helpers/src/invalidation.ts
@@ -0,0 +1,89 @@
import type { SchemaDef } from '@zenstackhq/schema';
import { log, type Logger } from './logging';
import { getMutatedModels, getReadModels } from './query-analysis';
import type { MaybePromise, ORMWriteActionType } from './types';

/**
* Type for a predicate that determines whether a query should be invalidated.
*/
export type InvalidationPredicate = ({ model, args }: { model: string; args: unknown }) => boolean;

/**
* Type for a function that invalidates queries matching the given predicate.
*/
export type InvalidateFunc = (predicate: InvalidationPredicate) => MaybePromise<void>;

/**
* Create a function that invalidates queries affected by the given mutation operation.
*
* @param model Model under mutation.
* @param operation Mutation operation (e.g., `update`).
* @param schema The schema.
* @param invalidator Function to invalidate queries matching a predicate. It should internally
* enumerate all query cache entries and invalidate those for which the predicate returns true.
* @param logging Logging option.
*/
export function createInvalidator(
model: string,
operation: string,
schema: SchemaDef,
invalidator: InvalidateFunc,
logging: Logger | undefined,
) {
return async (...args: unknown[]) => {
const [_, variables] = args;
const predicate = await getInvalidationPredicate(
model,
operation as ORMWriteActionType,
variables,
schema,
logging,
);
await invalidator(predicate);
};
}

// gets a predicate for evaluating whether a query should be invalidated
async function getInvalidationPredicate(
model: string,
operation: ORMWriteActionType,
mutationArgs: any,
schema: SchemaDef,
logging: Logger | undefined,
): Promise<InvalidationPredicate> {
const mutatedModels = await getMutatedModels(model, operation, mutationArgs, schema);

return ({ model, args }) => {
if (mutatedModels.includes(model)) {
// direct match
if (logging) {
log(
logging,
`Marking "${model}" query for invalidation due to mutation "${operation}", query args: ${JSON.stringify(args)}`,
);
}
return true;
}

if (args) {
// traverse query args to find nested reads that match the model under mutation
if (findNestedRead(model, mutatedModels, schema, args)) {
if (logging) {
log(
logging,
`Marking "${model}" query for invalidation due to mutation "${operation}", query args: ${JSON.stringify(args)}`,
);
}
return true;
}
}

return false;
};
}

// find nested reads that match the given models
function findNestedRead(visitingModel: string, targetModels: string[], schema: SchemaDef, args: any) {
const modelsRead = getReadModels(visitingModel, schema, args);
return targetModels.some((m) => modelsRead.includes(m));
}
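createInvalidator is meant to be wired into a client-side query cache by framework-specific client packages. A hedged sketch of such wiring, where the queryCache object and its API are purely hypothetical:

// Hypothetical cache wiring; `queryCache`, its entries() API, and the model name are illustrative only.
import { createInvalidator } from '@zenstackhq/client-helpers';
import type { SchemaDef } from '@zenstackhq/schema';

declare const schema: SchemaDef;
declare const queryCache: {
    entries(): Array<{ model: string; args: unknown; invalidate(): void }>;
};

const onPostCreated = createInvalidator(
    'Post', // model under mutation (assumed model name)
    'create', // mutation operation
    schema,
    (predicate) => {
        // enumerate cached queries and invalidate the ones the predicate matches
        for (const entry of queryCache.entries()) {
            if (predicate({ model: entry.model, args: entry.args })) {
                entry.invalidate();
            }
        }
    },
    true, // enable console logging
);

// after a successful create mutation:
// await onPostCreated(createdData, mutationVariables);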
15 changes: 15 additions & 0 deletions packages/clients/client-helpers/src/logging.ts
@@ -0,0 +1,15 @@
/**
* Logger configuration. `true` enables console logging. A function can be provided for custom logging.
*/
export type Logger = boolean | ((message: string) => void);

/**
* Logs a message using the provided logger.
*/
export function log(logger: Logger, message: string) {
if (typeof logger === 'function') {
logger(message);
} else if (logger) {
console.log(message);
}
}
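The Logger union keeps configuration simple: `true` routes messages to console.log, while a function routes them to a custom sink. A small sketch:

import { log, type Logger } from '@zenstackhq/client-helpers';

const messages: string[] = [];

const consoleLogger: Logger = true; // print via console.log
const bufferedLogger: Logger = (message) => {
    messages.push(message); // collect messages instead of printing
};

log(consoleLogger, 'invalidating queries for "Post"');
log(bufferedLogger, 'invalidating queries for "Post"');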