From 190991912cf87bca156dc63607999dec253f81fa Mon Sep 17 00:00:00 2001 From: T6 Date: Sat, 12 Nov 2022 09:24:38 -0800 Subject: [PATCH] chore: semicolon purge (#393) --- Readme.md | 8 +- _tasks/build_wasm.ts | 16 +- _tasks/dnt.ts | 12 +- _tasks/download_frame_metadata.ts | 34 ++-- _tasks/run_browser.ts | 148 ++++++++-------- _tasks/star.ts | 14 +- codegen.ts | 42 ++--- codegen/Files.ts | 30 ++-- codegen/codecVisitor.test.ts | 60 +++---- codegen/codecVisitor.ts | 86 ++++----- codegen/genMetadata.ts | 28 +-- codegen/mod.ts | 36 ++-- codegen/typeVisitor.ts | 96 +++++------ codegen/utils.ts | 80 ++++----- compat/mod.ts | 2 +- compat/pjs.ts | 16 +- deno.jsonc | 3 +- deps/dprint.ts | 8 +- deps/polkadot/keyring.ts | 2 +- deps/polkadot/keyring/types.ts | 2 +- deps/polkadot/types.ts | 2 +- deps/polkadot/util-crypto.ts | 2 +- deps/scale.ts | 2 +- deps/smoldot.ts | 2 +- deps/smoldot/client.d.ts | 2 +- deps/std/async.ts | 2 +- deps/std/encoding/base58.ts | 2 +- deps/std/encoding/hex.ts | 2 +- deps/std/flags.ts | 2 +- deps/std/fs.ts | 2 +- deps/std/path.ts | 2 +- deps/std/testing/asserts.ts | 2 +- deps/std/testing/snapshot.ts | 2 +- deps/zones.ts | 2 +- docs/Configs.md | 12 +- docs/Effects.md | 60 +++---- docs/Quick_Start.md | 28 +-- docs/Reading.md | 8 +- docs/Testing.md | 6 +- docs/Types.md | 68 ++++---- dprint.json | 3 +- effects/blockWatch.ts | 30 ++-- effects/const.ts | 32 ++-- effects/entryRead.ts | 36 ++-- effects/entryWatch.ts | 52 +++--- effects/events.ts | 30 ++-- effects/extrinsic.test.ts | 46 ++--- effects/extrinsic.ts | 122 ++++++------- effects/keyPageRead.ts | 38 ++-- effects/metadata.ts | 54 +++--- effects/mod.ts | 26 +-- effects/rpc.ts | 80 ++++----- effects/rpc_known_clients.ts | 22 +-- effects/rpc_known_methods.ts | 36 ++-- effects/scale.ts | 46 ++--- examples/all.ts | 4 +- examples/balance.ts | 10 +- examples/batch.ts | 14 +- examples/derived.ts | 12 +- examples/first_ten_keys.ts | 10 +- examples/metadata.ts | 10 +- examples/multisig_transfer.ts | 60 +++---- examples/polkadot_js_signer.ts | 22 +-- examples/raw_rpc_client_call.ts | 8 +- examples/raw_rpc_client_subscription.ts | 26 +-- examples/read_block.ts | 12 +- examples/read_bonded.ts | 12 +- examples/read_era_rewards.ts | 10 +- examples/read_events.ts | 8 +- examples/rpc_call.ts | 8 +- examples/rpc_subscription.ts | 18 +- examples/ticker.ts | 18 +- examples/transfer.ts | 26 +-- examples/watch_blocks.ts | 18 +- examples/watch_events.ts | 16 +- frame_metadata/Codec.test.ts | 126 +++++++------- frame_metadata/Codec.ts | 88 +++++----- frame_metadata/Contract.ts | 142 +++++++-------- frame_metadata/Era.ts | 64 +++---- frame_metadata/Extrinsic.ts | 220 ++++++++++++------------ frame_metadata/Key.test.ts | 70 ++++---- frame_metadata/Key.ts | 60 +++---- frame_metadata/Metadata.test.ts | 48 +++--- frame_metadata/Metadata.ts | 128 +++++++------- frame_metadata/TyVisitor.ts | 114 ++++++------ frame_metadata/mod.ts | 16 +- frame_metadata/scale_info.ts | 116 ++++++------- frame_metadata/test-common.ts | 24 +-- hashers/blake2b.test.ts | 36 ++-- hashers/blake2b.ts | 114 ++++++------ hashers/blake2b.wasm.ts | 4 +- hashers/mod.test.ts | 42 ++--- hashers/mod.ts | 116 ++++++------- hashers/xxhash.test.ts | 40 ++--- hashers/xxhash.ts | 120 ++++++------- hashers/xxhash.wasm.ts | 4 +- mod.ts | 18 +- rpc/client.test.ts | 56 +++--- rpc/client.ts | 152 ++++++++-------- rpc/known/author.ts | 52 +++--- rpc/known/babe.ts | 12 +- rpc/known/beefy.ts | 8 +- rpc/known/chain.ts | 54 +++--- rpc/known/childstate.ts | 28 +-- rpc/known/contracts.ts | 138 
+++++++-------- rpc/known/framesystem.ts | 12 +- rpc/known/grandpa.ts | 40 ++--- rpc/known/mmr.ts | 22 +-- rpc/known/mod.ts | 32 ++-- rpc/known/offchain.ts | 14 +- rpc/known/payment.ts | 32 ++-- rpc/known/state.ts | 116 ++++++------- rpc/known/statemigration.ts | 10 +- rpc/known/system.ts | 74 ++++---- rpc/known/utils.ts | 26 +-- rpc/messages.ts | 48 +++--- rpc/mod.ts | 14 +- rpc/provider/base.ts | 36 ++-- rpc/provider/errors.ts | 14 +- rpc/provider/proxy.test.ts | 28 +-- rpc/provider/proxy.ts | 104 +++++------ rpc/provider/smoldot.test.ts | 108 ++++++------ rpc/provider/smoldot.ts | 102 +++++------ ss58/mod.test.ts | 38 ++-- ss58/mod.ts | 94 +++++----- test_util/clients/kusama.ts | 4 +- test_util/clients/mod.ts | 8 +- test_util/clients/polkadot.ts | 4 +- test_util/clients/rococo.ts | 4 +- test_util/clients/westend.ts | 4 +- test_util/common.ts | 46 ++--- test_util/ctx.ts | 62 +++---- test_util/extrinsic.ts | 32 ++-- test_util/local.ts | 74 ++++---- test_util/mod.ts | 10 +- test_util/pairs.ts | 42 ++--- util/Counter.ts | 10 +- util/Listener.ts | 8 +- util/branded.ts | 20 +-- util/error.ts | 4 +- util/hex.ts | 22 +-- util/map.ts | 14 +- util/mod.ts | 16 +- util/tuple.ts | 4 +- util/types.ts | 4 +- words.txt | 1 + 146 files changed, 2739 insertions(+), 2736 deletions(-) diff --git a/Readme.md b/Readme.md index 1d665f66f..dbe6d392b 100644 --- a/Readme.md +++ b/Readme.md @@ -23,14 +23,14 @@ deno run -A -r https://deno.land/x/capi/codegen.ts \ Make use of those bindings. ```ts -import * as C from "capi"; -import { system } from "./polkadot/frame.ts"; +import * as C from "capi" +import { system } from "./polkadot/frame.ts" // bind to the last inserted key -const key = system.account.keys.first; +const key = system.account.keys.first // bind to the corresponding value -const value = C.run(system.account.get(key)); +const value = C.run(system.account.get(key)) ``` ## The Thesis diff --git a/_tasks/build_wasm.ts b/_tasks/build_wasm.ts index 109b3cbdd..849481e8b 100644 --- a/_tasks/build_wasm.ts +++ b/_tasks/build_wasm.ts @@ -1,12 +1,12 @@ -import * as path from "../deps/std/path.ts"; -import * as hex from "../util/hex.ts"; +import * as path from "../deps/std/path.ts" +import * as hex from "../util/hex.ts" const wasmPaths = [ "hashers/xxhash", "hashers/blake2b", -]; +] -await Promise.all(wasmPaths.map(build)); +await Promise.all(wasmPaths.map(build)) async function build(wasmPath: string) { const process = Deno.run({ @@ -14,13 +14,13 @@ async function build(wasmPath: string) { stdout: "piped", stderr: "inherit", stdin: "null", - }); + }) if (!(await process.status()).success) { - throw new Error(wasmPath + ".wat build failed"); + throw new Error(wasmPath + ".wat build failed") } - const wasm = await process.output(); + const wasm = await process.output() await Deno.writeTextFile( wasmPath + ".wasm.ts", @@ -31,5 +31,5 @@ import { Hex, hex } from "${path.relative(path.dirname(wasmPath), "util/mod.ts") export default hex.decode(\n"${hex.encode(wasm).replace(/.{0,64}|$/g, "\\\n$&")}" as Hex,\n); `.trimStart(), - ); + ) } diff --git a/_tasks/dnt.ts b/_tasks/dnt.ts index 2d5f6c54b..50302582c 100755 --- a/_tasks/dnt.ts +++ b/_tasks/dnt.ts @@ -1,10 +1,10 @@ -import { build } from "https://deno.land/x/dnt@0.26.0/mod.ts"; -import * as fs from "../deps/std/fs.ts"; -import * as path from "../deps/std/path.ts"; +import { build } from "https://deno.land/x/dnt@0.26.0/mod.ts" +import * as fs from "../deps/std/fs.ts" +import * as path from "../deps/std/path.ts" -const outDir = path.join("target", "npm"); +const 
outDir = path.join("target", "npm") -await fs.emptyDir(outDir); +await fs.emptyDir(outDir) await Promise.all([ build({ @@ -49,4 +49,4 @@ await Promise.all([ }), fs.copy("LICENSE", path.join(outDir, "LICENSE")), fs.copy("Readme.md", path.join(outDir, "Readme.md")), -]); +]) diff --git a/_tasks/download_frame_metadata.ts b/_tasks/download_frame_metadata.ts index 3f8c0173f..327951249 100755 --- a/_tasks/download_frame_metadata.ts +++ b/_tasks/download_frame_metadata.ts @@ -1,33 +1,33 @@ -import * as fs from "../deps/std/fs.ts"; -import * as path from "../deps/std/path.ts"; -import * as Z from "../deps/zones.ts"; -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as fs from "../deps/std/fs.ts" +import * as path from "../deps/std/path.ts" +import * as Z from "../deps/zones.ts" +import * as C from "../mod.ts" +import * as U from "../util/mod.ts" -const outDir = path.join(Deno.cwd(), "frame_metadata", "_downloaded"); -await fs.emptyDir(outDir); -U.throwIfError(await Z.ls(...Object.entries(C.knownClients).map(download)).run()); +const outDir = path.join(Deno.cwd(), "frame_metadata", "_downloaded") +await fs.emptyDir(outDir) +U.throwIfError(await Z.ls(...Object.entries(C.knownClients).map(download)).run()) function download, Client extends Z.$>( entry: [name: Name, client: Client], ) { return Z.ls(...entry).next(async ([name, client]) => { try { - const metadataHex = U.throwIfError(await C.state.getMetadata(client)().run()); - const outPath = path.join(outDir, `${name}.scale`); - console.log(`Downloading ${name} metadata to "${outPath}".`); - await Deno.writeTextFile(outPath, metadataHex); - return; + const metadataHex = U.throwIfError(await C.state.getMetadata(client)().run()) + const outPath = path.join(outDir, `${name}.scale`) + console.log(`Downloading ${name} metadata to "${outPath}".`) + await Deno.writeTextFile(outPath, metadataHex) + return } catch (cause) { - return new MetadataDownloadError(name, { cause }); + return new MetadataDownloadError(name, { cause }) } - }); + }) } class MetadataDownloadError extends Error { - override readonly name = "MetadataDownloadError"; + override readonly name = "MetadataDownloadError" constructor(readonly chainName: string, options: ErrorOptions) { - super(undefined, options); + super(undefined, options) } } diff --git a/_tasks/run_browser.ts b/_tasks/run_browser.ts index 628f11ec3..0d2af70e8 100644 --- a/_tasks/run_browser.ts +++ b/_tasks/run_browser.ts @@ -1,151 +1,151 @@ -import { contentType } from "https://deno.land/x/media_types@v2.11.1/mod.ts"; -import { Application, send } from "https://deno.land/x/oak@v10.1.0/mod.ts"; -import { babel, babelPresetTypeScript } from "https://escad.dev/deps/babel.ts"; -import * as path from "../deps/std/path.ts"; +import { contentType } from "https://deno.land/x/media_types@v2.11.1/mod.ts" +import { Application, send } from "https://deno.land/x/oak@v10.1.0/mod.ts" +import { babel, babelPresetTypeScript } from "https://escad.dev/deps/babel.ts" +import * as path from "../deps/std/path.ts" -const dirname = path.dirname(path.fromFileUrl(import.meta.url)); +const dirname = path.dirname(path.fromFileUrl(import.meta.url)) -const port = +(Deno.env.get("PORT") ?? "8080"); +const port = +(Deno.env.get("PORT") ?? 
"8080") -const transpiledDir = path.join(dirname, "../target/transpiled"); +const transpiledDir = path.join(dirname, "../target/transpiled") -const getTranspiledLocation = (url: string) => path.join(transpiledDir, getTranspiledPath(url)); +const getTranspiledLocation = (url: string) => path.join(transpiledDir, getTranspiledPath(url)) const getTranspiledPath = (url: string) => { - return `/${url}`; -}; + return `/${url}` +} const transpiler = createTranspiler({ cache: { has: async (url) => { - if (url.startsWith("file://")) return false; + if (url.startsWith("file://")) return false try { - await Deno.lstat(getTranspiledLocation(url)); - return true; + await Deno.lstat(getTranspiledLocation(url)) + return true } catch (e) { if (e instanceof Deno.errors.NotFound) { - return false; + return false } else { - throw e; + throw e } } }, set: async (url, result) => { - const loc = getTranspiledLocation(url); - await Deno.mkdir(path.dirname(loc), { recursive: true }); - await Deno.writeTextFile(loc, result); + const loc = getTranspiledLocation(url) + await Deno.mkdir(path.dirname(loc), { recursive: true }) + await Deno.writeTextFile(loc, result) }, }, transformUrl: getTranspiledPath, -}); +}) -const rootFile = new URL(Deno.args[0]!, path.toFileUrl(Deno.cwd() + "/")).toString(); +const rootFile = new URL(Deno.args[0]!, path.toFileUrl(Deno.cwd() + "/")).toString() -const app = new Application(); +const app = new Application() app.use(async (ctx) => { - let path = ctx.request.url.pathname; + let path = ctx.request.url.pathname if (ctx.request.url.pathname === "/") { - transpiler.memo.clear(); - await transpiler.transpile(rootFile); - path = "/index.html"; + transpiler.memo.clear() + await transpiler.transpile(rootFile) + path = "/index.html" } await send(ctx, path, { root: transpiledDir, contentTypes: new Proxy({}, { get: (_target, key) => { - return contentType(key as string) ?? contentType(".js"); + return contentType(key as string) ?? contentType(".js") }, }), - }); -}); + }) +}) -const rootFilePath = getTranspiledPath(transformUrl(rootFile)); +const rootFilePath = getTranspiledPath(transformUrl(rootFile)) await Deno.writeTextFile( path.join(transpiledDir, "index.html"), ` `.trim(), -); +) -console.log(`http://localhost:${port}/`); +console.log(`http://localhost:${port}/`) -await app.listen({ port }); +await app.listen({ port }) function transformUrl(url: string) { return url .replace(/[?#]/g, "_") .replace(/\.\./g, "__") - .replace(/\.ts$|(? 
Promise; - set: (url: string, result: string) => Promise; - }; - transformUrl: (url: string) => string; + has: (url: string) => Promise + set: (url: string, result: string) => Promise + } + transformUrl: (url: string) => string } interface Transpiler extends TranspilerHost { - memo: Map>; - transpile: (url: string, force?: boolean) => Promise; - transpileAll: (urls: string[], force?: boolean) => Promise; + memo: Map> + transpile: (url: string, force?: boolean) => Promise + transpileAll: (urls: string[], force?: boolean) => Promise } function createTranspiler(ctx: TranspilerHost): Transpiler { - const memo = new Map>(); + const memo = new Map>() - return { ...ctx, memo, transpile, transpileAll }; + return { ...ctx, memo, transpile, transpileAll } function transpile(url: string, force = false) { - return transpileAll([url], force); + return transpileAll([url], force) } async function transpileAll(urls: string[], force = false) { - const done = new Set(urls); - const waiting = urls.map((url) => _transpile(url, force)); + const done = new Set(urls) + const waiting = urls.map((url) => _transpile(url, force)) while (waiting.length) { for (const dep of await waiting.pop()!) { - if (done.has(dep)) continue; - done.add(dep); - waiting.push(_transpile(dep)); + if (done.has(dep)) continue + done.add(dep) + waiting.push(_transpile(dep)) } } } function _transpile(url: string, force = false) { if (!force) { - const running = memo.get(url); + const running = memo.get(url) if (running) { - return running; + return running } } - console.log("transpiling", url); + console.log("transpiling", url) const prom = (async () => { if (!force && await ctx.cache.has(url)) { - return []; + return [] } - const [result, deps] = await __transpile(url); - deps.map((x) => transpile(x)); - await ctx.cache.set(transformUrl(url), result); - return deps; - })(); - memo.set(url, prom); - return prom; + const [result, deps] = await __transpile(url) + deps.map((x) => transpile(x)) + await ctx.cache.set(transformUrl(url), result) + return deps + })() + memo.set(url, prom) + return prom } async function fetchFile(url: string) { - const response = await fetch(url); + const response = await fetch(url) if (!response.ok) { - throw Object.assign(new Error(`Error fetching ${url} for transpilation`), { response }); + throw Object.assign(new Error(`Error fetching ${url} for transpilation`), { response }) } - const content = await response.text(); - return content; + const content = await response.text() + return content } async function __transpile(url: string) { - const content = await fetchFile(url); - const deps: string[] = []; + const content = await fetchFile(url) + const deps: string[] = [] const result = await babel.transformAsync(content, { filename: url, presets: [ @@ -166,21 +166,21 @@ function createTranspiler(ctx: TranspilerHost): Transpiler { path.parent.type, ) ) { - return; + return } - const str = path.node.value; - const resolved = (new URL(str, url)).toString(); - deps.push(resolved); + const str = path.node.value + const resolved = (new URL(str, url)).toString() + deps.push(resolved) path.replaceWith(babel.types.stringLiteral( ctx.transformUrl(transformUrl(resolved)), - )); - path.skip(); + )) + path.skip() }, }, }, ], - }); - if (result?.code == null) throw new Error("Babel returned null"); - return [result.code, deps] as const; + }) + if (result?.code == null) throw new Error("Babel returned null") + return [result.code, deps] as const } } diff --git a/_tasks/star.ts b/_tasks/star.ts index ffa4fd50a..e2b7bfaed 100755 
--- a/_tasks/star.ts +++ b/_tasks/star.ts @@ -1,16 +1,16 @@ -import * as fs from "../deps/std/fs.ts"; -import * as path from "../deps/std/path.ts"; +import * as fs from "../deps/std/fs.ts" +import * as path from "../deps/std/path.ts" -let generated = ""; +let generated = "" for await ( const entry of fs.walk(".", { match: [/\.ts$/], skip: [/^target\//], }) ) { - generated += `import ${JSON.stringify(`./${entry.path}`)};\n`; + generated += `import ${JSON.stringify(`./${entry.path}`)};\n` } -const dest = path.join(Deno.cwd(), "_star.ts"); -console.log(`Writing "${dest}".`); -await Deno.writeTextFile(dest, generated); +const dest = path.join(Deno.cwd(), "_star.ts") +console.log(`Writing "${dest}".`) +await Deno.writeTextFile(dest, generated) diff --git a/codegen.ts b/codegen.ts index 80284d01f..8311a9117 100644 --- a/codegen.ts +++ b/codegen.ts @@ -1,9 +1,9 @@ // TODO: prettier messaging & help screens -import { codegen } from "./codegen/mod.ts"; -import { parse } from "./deps/std/flags.ts"; -import * as C from "./mod.ts"; -import * as T from "./test_util/mod.ts"; -import * as U from "./util/mod.ts"; +import { codegen } from "./codegen/mod.ts" +import { parse } from "./deps/std/flags.ts" +import * as C from "./mod.ts" +import * as T from "./test_util/mod.ts" +import * as U from "./util/mod.ts" const args = parse(Deno.args, { string: ["src", "out", "import", "dev"], @@ -17,45 +17,45 @@ const args = parse(Deno.args, { out: ["o"], help: ["h", "?"], }, -}); +}) -if (args.help) help(); +if (args.help) help() if (!args.out) { - throw new Error("Must specify `out`"); + throw new Error("Must specify `out`") } -let metadata: C.M.Metadata; +let metadata: C.M.Metadata if (args.src && args.dev) { - throw Error("Cannot specify both `src` and `dev`"); + throw Error("Cannot specify both `src` and `dev`") } else if (args.src) { if (args.src.endsWith(".scale")) { - metadata = C.M.fromPrefixedHex(await Deno.readTextFile(args.src)); + metadata = C.M.fromPrefixedHex(await Deno.readTextFile(args.src)) } else { - const client = C.rpcClient(C.rpc.proxyProvider, args.src); - metadata = U.throwIfError(await C.metadata(client)().run()); + const client = C.rpcClient(C.rpc.proxyProvider, args.src) + metadata = U.throwIfError(await C.metadata(client)().run()) } } else if (args.dev) { if (!T.isRuntimeName(args.dev)) { - throw new T.InvalidRuntimeSpecifiedError(args.dev); + throw new T.InvalidRuntimeSpecifiedError(args.dev) } - const client = T[args.dev as T.RuntimeName]; - metadata = U.throwIfError(await C.metadata(client)().run()); + const client = T[args.dev as T.RuntimeName] + metadata = U.throwIfError(await C.metadata(client)().run()) } else { - throw new Error("Please specify either `src` or `dev`"); + throw new Error("Please specify either `src` or `dev`") } -await run(metadata, args.out); +await run(metadata, args.out) function run(metadata: C.M.Metadata, out: string) { return codegen({ importSpecifier: args.import, metadata, }) - .write(out); + .write(out) } // TODO: do we handle help differently depending on what flags were specified? 
function help(): never { - console.log("Usage: codegen -s= -o="); - Deno.exit(); + console.log("Usage: codegen -s= -o=") + Deno.exit() } diff --git a/codegen/Files.ts b/codegen/Files.ts index fbdb96035..e97704823 100644 --- a/codegen/Files.ts +++ b/codegen/Files.ts @@ -1,32 +1,32 @@ -import { tsFormatter } from "../deps/dprint.ts"; -import * as path from "../deps/std/path.ts"; -import { S } from "./utils.ts"; +import { tsFormatter } from "../deps/dprint.ts" +import * as path from "../deps/std/path.ts" +import { S } from "./utils.ts" -export type File = { getContent: () => S }; +export type File = { getContent: () => S } export class Files extends Map { async write(outDir: string) { - const errors = []; + const errors = [] try { - await Deno.remove(outDir, { recursive: true }); + await Deno.remove(outDir, { recursive: true }) } catch (e) { if (!(e instanceof Deno.errors.NotFound)) { - throw e; + throw e } } - await Deno.mkdir(outDir, { recursive: true }); + await Deno.mkdir(outDir, { recursive: true }) for (const [relativePath, file] of this.entries()) { - const outputPath = path.join(outDir, relativePath); - const content = S.toString(file.getContent()); + const outputPath = path.join(outDir, relativePath) + const content = S.toString(file.getContent()) try { - const formatted = tsFormatter.formatText("gen.ts", content); - await Deno.writeTextFile(outputPath, formatted); + const formatted = tsFormatter.formatText("gen.ts", content) + await Deno.writeTextFile(outputPath, formatted) } catch (e) { - await Deno.writeTextFile(outputPath, content); - errors.push(e); + await Deno.writeTextFile(outputPath, content) + errors.push(e) } } if (errors.length) { - throw errors; + throw errors } } } diff --git a/codegen/codecVisitor.test.ts b/codegen/codecVisitor.test.ts index 6ebf8d540..f569be4ad 100644 --- a/codegen/codecVisitor.test.ts +++ b/codegen/codecVisitor.test.ts @@ -1,51 +1,51 @@ -import { Codec } from "../deps/scale.ts"; -import * as path from "../deps/std/path.ts"; -import { assertEquals } from "../deps/std/testing/asserts.ts"; -import * as M from "../frame_metadata/mod.ts"; -import * as C from "../mod.ts"; -import * as testClients from "../test_util/clients/mod.ts"; -import * as U from "../util/mod.ts"; -import { codegen } from "./mod.ts"; +import { Codec } from "../deps/scale.ts" +import * as path from "../deps/std/path.ts" +import { assertEquals } from "../deps/std/testing/asserts.ts" +import * as M from "../frame_metadata/mod.ts" +import * as C from "../mod.ts" +import * as testClients from "../test_util/clients/mod.ts" +import * as U from "../util/mod.ts" +import { codegen } from "./mod.ts" -const currentDir = path.dirname(path.fromFileUrl(import.meta.url)); -const codegenTestDir = path.join(currentDir, "../target/codegen"); +const currentDir = path.dirname(path.fromFileUrl(import.meta.url)) +const codegenTestDir = path.join(currentDir, "../target/codegen") for (const [runtime, client] of Object.entries(testClients)) { Deno.test(runtime, async () => { - const metadata = U.throwIfError(await C.metadata(client)().run()); - const outDir = path.join(codegenTestDir, runtime); + const metadata = U.throwIfError(await C.metadata(client)().run()) + const outDir = path.join(codegenTestDir, runtime) await codegen({ importSpecifier: "../../../mod.ts", metadata, - }).write(outDir); - const codegened = await import(path.toFileUrl(path.join(outDir, "mod.ts")).toString()); - const deriveCodec = M.DeriveCodec(metadata.tys); - const derivedCodecs = metadata.tys.map(deriveCodec); - const codegenCodecs = 
codegened._metadata.types; - const origInspect = Codec.prototype["_inspect"]!; - let inspecting = 0; + }).write(outDir) + const codegened = await import(path.toFileUrl(path.join(outDir, "mod.ts")).toString()) + const deriveCodec = M.DeriveCodec(metadata.tys) + const derivedCodecs = metadata.tys.map(deriveCodec) + const codegenCodecs = codegened._metadata.types + const origInspect = Codec.prototype["_inspect"]! + let inspecting = 0 Codec.prototype["_inspect"] = function(inspect) { if (inspecting) { - const di = derivedCodecs.indexOf(this); - if (di !== -1) return "$" + di; - const ci = codegenCodecs.indexOf(this); - if (ci !== -1) return "$" + ci; + const di = derivedCodecs.indexOf(this) + if (di !== -1) return "$" + di + const ci = codegenCodecs.indexOf(this) + if (ci !== -1) return "$" + ci } - inspecting++; + inspecting++ try { - return origInspect.call(this, inspect); + return origInspect.call(this, inspect) } finally { - inspecting--; + inspecting-- } - }; + } for (let i = 0; i < derivedCodecs.length; i++) { if ( Deno.inspect(derivedCodecs[i], { depth: Infinity }) !== Deno.inspect(codegenCodecs[i], { depth: Infinity }) ) { - assertEquals(derivedCodecs[i], codegenCodecs[i]); + assertEquals(derivedCodecs[i], codegenCodecs[i]) } } - Codec.prototype["_inspect"] = origInspect; - }); + Codec.prototype["_inspect"] = origInspect + }) } diff --git a/codegen/codecVisitor.ts b/codegen/codecVisitor.ts index e1a69d82d..19006e97c 100644 --- a/codegen/codecVisitor.ts +++ b/codegen/codecVisitor.ts @@ -1,7 +1,7 @@ -import * as M from "../frame_metadata/mod.ts"; -import { Files } from "./Files.ts"; -import { CodegenProps } from "./mod.ts"; -import { Decl, getCodecPath, getName, getRawCodecPath, S } from "./utils.ts"; +import * as M from "../frame_metadata/mod.ts" +import { Files } from "./Files.ts" +import { CodegenProps } from "./mod.ts" +import { Decl, getCodecPath, getName, getRawCodecPath, S } from "./utils.ts" export function createCodecVisitor( props: CodegenProps, @@ -9,9 +9,9 @@ export function createCodecVisitor( typeVisitor: M.TyVisitor, files: Files, ) { - const { tys } = props.metadata; - const namespaceImports = new Set(); - const codecs: S[] = []; + const { tys } = props.metadata + const namespaceImports = new Set() + const codecs: S[] = [] files.set("codecs.ts", { getContent: () => [ "\n", @@ -26,17 +26,17 @@ export function createCodecVisitor( S.array(props.metadata.tys.map((ty) => getName(getRawCodecPath(ty)))), ], ], - }); + }) return new M.TyVisitor(tys, { unitStruct(ty) { - return addCodecDecl(ty, "$null"); + return addCodecDecl(ty, "$null") }, wrapperStruct(ty, inner) { - return addCodecDecl(ty, this.visit(inner)); + return addCodecDecl(ty, this.visit(inner)) }, tupleStruct(ty, members) { - return addCodecDecl(ty, ["$.tuple(", members.map((x) => [this.visit(x), ","]), ")"]); + return addCodecDecl(ty, ["$.tuple(", members.map((x) => [this.visit(x), ","]), ")"]) }, objectStruct(ty) { return addCodecDecl( @@ -48,10 +48,10 @@ export function createCodecVisitor( ), ")", ], - ); + ) }, option(ty, some) { - return addCodecDecl(ty, ["$.option(", this.visit(some), ")"]); + return addCodecDecl(ty, ["$.option(", this.visit(some), ")"]) }, result(ty, ok, err) { return addCodecDecl(ty, ["$.result(", this.visit(ok), ",", [ @@ -60,17 +60,17 @@ export function createCodecVisitor( `>, ["value", `, this.visit(err), "])", - ], ")"]); + ], ")"]) }, never(ty) { - return addCodecDecl(ty, "$.never"); + return addCodecDecl(ty, "$.never") }, stringUnion(ty) { return addCodecDecl(ty, [ "$.stringUnion(", 
S.object(...ty.members.map((x): [S, S] => [x.index, S.string(x.name)])), ")", - ]); + ]) }, taggedUnion(ty) { return addCodecDecl( @@ -79,15 +79,15 @@ export function createCodecVisitor( `$.taggedUnion("type",`, S.object( ...ty.members.map(({ fields, name: type, index }): [S, S] => { - let props: S[]; + let props: S[] if (fields.length === 0) { - props = []; + props = [] } else if (fields[0]!.name === undefined) { // Tuple variant const value = fields.length === 1 ? this.visit(fields[0]!.ty) - : ["$.tuple(", fields.map((f) => [this.visit(f.ty), ","]), ")"]; - props = [S.array([S.string("value"), value])]; + : ["$.tuple(", fields.map((f) => [this.visit(f.ty), ","]), ")"] + props = [S.array([S.string("value"), value])] } else { // Object variant props = fields.map((field) => @@ -95,57 +95,57 @@ export function createCodecVisitor( S.string(field.name!), this.visit(field.ty), ]) - ); + ) } - return [index, S.array([S.string(type), ...props])]; + return [index, S.array([S.string(type), ...props])] }), ), ")", ], - ); + ) }, uint8Array(ty) { - return addCodecDecl(ty, "$.uint8Array"); + return addCodecDecl(ty, "$.uint8Array") }, array(ty) { - return addCodecDecl(ty, ["$.array(", this.visit(ty.typeParam), ")"]); + return addCodecDecl(ty, ["$.array(", this.visit(ty.typeParam), ")"]) }, sizedUint8Array(ty) { - return addCodecDecl(ty, `$.sizedUint8Array(${ty.len})`); + return addCodecDecl(ty, `$.sizedUint8Array(${ty.len})`) }, sizedArray(ty) { - return addCodecDecl(ty, ["$.sizedArray(", this.visit(ty.typeParam), ",", ty.len, ")"]); + return addCodecDecl(ty, ["$.sizedArray(", this.visit(ty.typeParam), ",", ty.len, ")"]) }, primitive(ty) { - return addCodecDecl(ty, getCodecPath(tys, ty)!); + return addCodecDecl(ty, getCodecPath(tys, ty)!) }, compact(ty) { - return addCodecDecl(ty, ["$.compact(", this.visit(ty.typeParam), ")"]); + return addCodecDecl(ty, ["$.compact(", this.visit(ty.typeParam), ")"]) }, bitSequence(ty) { - return addCodecDecl(ty, "$.bitSequence"); + return addCodecDecl(ty, "$.bitSequence") }, map(ty, key, val) { - return addCodecDecl(ty, ["$.map(", this.visit(key), ",", this.visit(val), ")"]); + return addCodecDecl(ty, ["$.map(", this.visit(key), ",", this.visit(val), ")"]) }, set(ty, val) { - return addCodecDecl(ty, ["$.set(", this.visit(val), ")"]); + return addCodecDecl(ty, ["$.set(", this.visit(val), ")"]) }, era(ty) { - return addCodecDecl(ty, "$era"); + return addCodecDecl(ty, "$era") }, lenPrefixedWrapper(ty, inner) { - return addCodecDecl(ty, ["$.lenPrefixed(", this.visit(inner), ")"]); + return addCodecDecl(ty, ["$.lenPrefixed(", this.visit(inner), ")"]) }, circular(ty) { - return ["$.deferred(() =>", getName(getRawCodecPath(ty)), ")"]; + return ["$.deferred(() =>", getName(getRawCodecPath(ty)), ")"] }, - }); + }) function addCodecDecl(ty: M.Ty, value: S) { - const rawPath = getRawCodecPath(ty); + const rawPath = getRawCodecPath(ty) if (ty.path.length > 1) { - namespaceImports.add(ty.path[0]!); + namespaceImports.add(ty.path[0]!) } codecs.push([ ["export const", getName(rawPath)], @@ -153,8 +153,8 @@ export function createCodecVisitor( fixType(typeVisitor.visit(ty)), "> =", value, - ]); - const path = getCodecPath(tys, ty); + ]) + const path = getCodecPath(tys, ty) // Deduplicate -- metadata has redundant entries (e.g. 
pallet_collective::RawOrigin) if (path !== rawPath && path !== value && !decls.some((x) => x.path === path)) { decls.push({ @@ -166,9 +166,9 @@ export function createCodecVisitor( "> =", rawPath, ], - }); + }) } - return getName(rawPath); + return getName(rawPath) } /** @@ -180,6 +180,6 @@ export function createCodecVisitor( // Matches paths (`a.b.c`) that either contain a `.`, or are a number type (either `u123` or `Compact`) /\b([\w\$]+\.[\w\.$]+|u\d+|Compact)\b/g, (x) => "t." + x, - ); + ) } } diff --git a/codegen/genMetadata.ts b/codegen/genMetadata.ts index 42d6006f3..3aa11981c 100644 --- a/codegen/genMetadata.ts +++ b/codegen/genMetadata.ts @@ -1,13 +1,13 @@ -import * as M from "../frame_metadata/mod.ts"; -import { Decl, getPath, getRawCodecPath, makeDocComment, S } from "./utils.ts"; +import * as M from "../frame_metadata/mod.ts" +import { Decl, getPath, getRawCodecPath, makeDocComment, S } from "./utils.ts" export function genMetadata(metadata: M.Metadata, decls: Decl[]) { - const { tys, extrinsic, pallets } = metadata; + const { tys, extrinsic, pallets } = metadata const isUnitVisitor = new M.TyVisitor(tys, { unitStruct: () => true, wrapperStruct(_, inner) { - return this.visit(inner); + return this.visit(inner) }, tupleStruct: () => false, objectStruct: () => false, @@ -23,7 +23,7 @@ export function genMetadata(metadata: M.Metadata, decls: Decl[]) { bitSequence: () => false, lenPrefixedWrapper: () => false, circular: () => false, - }); + }) decls.push({ path: "_metadata.extrinsic", @@ -38,7 +38,7 @@ export function genMetadata(metadata: M.Metadata, decls: Decl[]) { ], ), ], - }); + }) for (const pallet of pallets) { for (const entry of pallet.storage?.entries ?? []) { decls.push({ @@ -64,13 +64,13 @@ export function genMetadata(metadata: M.Metadata, decls: Decl[]) { ["value", getRawCodecPath(entry.value)], ), ], - }); + }) } if (pallet.calls) { - const ty = pallet.calls as M.Ty & M.UnionTyDef; - const isStringUnion = ty.members.every((x) => !x.fields.length); + const ty = pallet.calls as M.Ty & M.UnionTyDef + const isStringUnion = ty.members.every((x) => !x.fields.length) for (const call of ty.members) { - const typeName = isStringUnion ? S.string(call.name) : getPath(tys, ty)! + "." + call.name; + const typeName = isStringUnion ? S.string(call.name) : getPath(tys, ty)! + "." + call.name const [params, data]: [S, S] = call.fields.length ? call.fields[0]!.name ? [`value: Omit<${typeName}, "type">`, ["{ ...value, type:", S.string(call.name), "}"]] @@ -79,7 +79,7 @@ export function genMetadata(metadata: M.Metadata, decls: Decl[]) { S.string(call.name), "}", ]] - : ["", isStringUnion ? S.string(call.name) : S.object(["type", S.string(call.name)])]; + : ["", isStringUnion ? 
S.string(call.name) : S.object(["type", S.string(call.name)])] decls.push({ path: `${pallet.name}.${call.name}`, code: [ @@ -90,7 +90,7 @@ export function genMetadata(metadata: M.Metadata, decls: Decl[]) { [":", typeName], ["{ return", data, "}"], ], - }); + }) } } } @@ -98,11 +98,11 @@ export function genMetadata(metadata: M.Metadata, decls: Decl[]) { decls.push({ path: "_metadata.types", code: "export const types = _codec._all", - }); + }) function getExtrasCodec(xs: [string, M.Ty][]) { return S.array( xs.filter((x) => !isUnitVisitor.visit(x[1])).map((x) => getRawCodecPath(x[1])), - ); + ) } } diff --git a/codegen/mod.ts b/codegen/mod.ts index de6f8e277..b87efad97 100644 --- a/codegen/mod.ts +++ b/codegen/mod.ts @@ -1,18 +1,18 @@ -import * as M from "../frame_metadata/mod.ts"; -import { createCodecVisitor } from "./codecVisitor.ts"; -import { Files } from "./Files.ts"; -import { genMetadata } from "./genMetadata.ts"; -import { createTypeVisitor } from "./typeVisitor.ts"; -import { Decl, printDecls, S } from "./utils.ts"; +import * as M from "../frame_metadata/mod.ts" +import { createCodecVisitor } from "./codecVisitor.ts" +import { Files } from "./Files.ts" +import { genMetadata } from "./genMetadata.ts" +import { createTypeVisitor } from "./typeVisitor.ts" +import { Decl, printDecls, S } from "./utils.ts" export interface CodegenProps { - metadata: M.Metadata; - importSpecifier: string; + metadata: M.Metadata + importSpecifier: string } export function codegen(props: CodegenProps): Files { - const decls: Decl[] = []; - const files = new Files(); + const decls: Decl[] = [] + const files = new Files() decls.push({ path: "_", code: [ @@ -24,16 +24,16 @@ export function codegen(props: CodegenProps): Files { [`import * as _codec from "./codecs.ts"`], [`export { _metadata }`], ], - }); - const typeVisitor = createTypeVisitor(props, decls); - const codecVisitor = createCodecVisitor(props, decls, typeVisitor, files); + }) + const typeVisitor = createTypeVisitor(props, decls) + const codecVisitor = createCodecVisitor(props, decls, typeVisitor, files) for (const ty of props.metadata.tys) { - typeVisitor.visit(ty); - codecVisitor.visit(ty); + typeVisitor.visit(ty) + codecVisitor.visit(ty) } - genMetadata(props.metadata, decls); + genMetadata(props.metadata, decls) files.set("mod.ts", { getContent: () => printDecls(decls), - }); - return files; + }) + return files } diff --git a/codegen/typeVisitor.ts b/codegen/typeVisitor.ts index 21216c9a9..12467b222 100644 --- a/codegen/typeVisitor.ts +++ b/codegen/typeVisitor.ts @@ -1,19 +1,19 @@ -import * as M from "../frame_metadata/mod.ts"; -import { CodegenProps } from "./mod.ts"; -import { Decl, getName, getPath, makeDocComment, S } from "./utils.ts"; +import * as M from "../frame_metadata/mod.ts" +import { CodegenProps } from "./mod.ts" +import { Decl, getName, getPath, makeDocComment, S } from "./utils.ts" export function createTypeVisitor(props: CodegenProps, decls: Decl[]) { - const { tys } = props.metadata; + const { tys } = props.metadata return new M.TyVisitor(tys, { unitStruct(ty) { - return addTypeDecl(ty, "null"); + return addTypeDecl(ty, "null") }, wrapperStruct(ty, inner) { - if (ty.path[0] === "Cow") return this.visit(inner); - return addTypeDecl(ty, this.visit(inner)); + if (ty.path[0] === "Cow") return this.visit(inner) + return addTypeDecl(ty, this.visit(inner)) }, tupleStruct(ty, members) { - return addTypeDecl(ty, S.array(members.map((x) => this.visit(x)))); + return addTypeDecl(ty, S.array(members.map((x) => this.visit(x)))) }, 
objectStruct(ty) { return addInterfaceDecl( @@ -23,45 +23,45 @@ export function createTypeVisitor(props: CodegenProps, decls: Decl[]) { (x) => [makeDocComment(x.docs), x.name!, this.visit(x.ty)] as const, ), ), - ); + ) }, option(_ty, some) { - return [this.visit(some), "| undefined"]; + return [this.visit(some), "| undefined"] }, result(_ty, ok, err) { - return [this.visit(ok), "|", ["ChainError<", this.visit(err), ">"]]; + return [this.visit(ok), "|", ["ChainError<", this.visit(err), ">"]] }, never(ty) { - return addTypeDecl(ty, "never"); + return addTypeDecl(ty, "never") }, stringUnion(ty) { - return addTypeDecl(ty, [ty.members.map((x) => ["|", S.string(x.name)])]); + return addTypeDecl(ty, [ty.members.map((x) => ["|", S.string(x.name)])]) }, taggedUnion(ty) { - const path = getPath(tys, ty)!; - const name = getName(path); + const path = getPath(tys, ty)! + const name = getName(path) decls.push({ path, code: [ makeDocComment(ty.docs), ["export type", name, "="], ty.members.map(({ fields, name: type, docs }) => { - let props: [comment: S, name: S, type: S][]; + let props: [comment: S, name: S, type: S][] if (fields.length === 0) { - props = []; + props = [] } else if (fields[0]!.name === undefined) { // Tuple variant const value = fields.length === 1 ? this.visit(fields[0]!.ty) - : S.array(fields.map((f) => this.visit(f.ty))); - props = [["", "value", value]]; + : S.array(fields.map((f) => this.visit(f.ty))) + props = [["", "value", value]] } else { // Object variant props = fields.map((field, i) => [ makeDocComment(field.docs), field.name || i, this.visit(field.ty), - ]); + ]) } decls.push({ path: path + "." + type, @@ -73,75 +73,75 @@ export function createTypeVisitor(props: CodegenProps, decls: Decl[]) { ...props, ), ], - }); - return ["|", path, ".", type]; + }) + return ["|", path, ".", type] }), ], - }); - return path; + }) + return path }, uint8Array(ty) { - return addTypeDecl(ty, "Uint8Array"); + return addTypeDecl(ty, "Uint8Array") }, array(ty) { - return addTypeDecl(ty, ["Array<", this.visit(ty.typeParam), ">"]); + return addTypeDecl(ty, ["Array<", this.visit(ty.typeParam), ">"]) }, sizedUint8Array(ty) { - return addTypeDecl(ty, "Uint8Array"); // TODO: consider `& { length: L }` + return addTypeDecl(ty, "Uint8Array") // TODO: consider `& { length: L }` }, sizedArray(ty) { - return addTypeDecl(ty, S.array(Array(ty.len).fill(this.visit(ty.typeParam)))); + return addTypeDecl(ty, S.array(Array(ty.len).fill(this.visit(ty.typeParam)))) }, primitive(ty) { - if (ty.kind === "char") return addTypeDecl(ty, "string"); - if (ty.kind === "bool") return "boolean"; - if (ty.kind === "str") return "string"; - if (+ty.kind.slice(1) < 64) return addTypeDecl(ty, "number"); - return addTypeDecl(ty, "bigint"); + if (ty.kind === "char") return addTypeDecl(ty, "string") + if (ty.kind === "bool") return "boolean" + if (ty.kind === "str") return "string" + if (+ty.kind.slice(1) < 64) return addTypeDecl(ty, "number") + return addTypeDecl(ty, "bigint") }, compact(ty) { - decls.push({ path: "Compact", code: "export type Compact = T" }); - return ["Compact<", this.visit(ty.typeParam), ">"]; + decls.push({ path: "Compact", code: "export type Compact = T" }) + return ["Compact<", this.visit(ty.typeParam), ">"] }, bitSequence(ty) { - return addTypeDecl(ty, "BitSequence"); + return addTypeDecl(ty, "BitSequence") }, map(_ty, key, val) { - return ["Map<", this.visit(key), ",", this.visit(val), ">"]; + return ["Map<", this.visit(key), ",", this.visit(val), ">"] }, set(_ty, val) { - return ["Set<", 
this.visit(val), ">"]; + return ["Set<", this.visit(val), ">"] }, era() { - return "Era"; + return "Era" }, lenPrefixedWrapper(_ty, inner) { - return this.visit(inner); + return this.visit(inner) }, circular(ty) { - return getPath(tys, ty) || this._visit(ty); + return getPath(tys, ty) || this._visit(ty) }, - }); + }) function addTypeDecl(ty: M.Ty, value: S) { - const path = getPath(tys, ty); + const path = getPath(tys, ty) if (path && path !== value) { decls.push({ path, code: [makeDocComment(ty.docs), ["export type", getName(path)], "=", value], - }); + }) } - return path || value; + return path || value } function addInterfaceDecl(ty: M.Ty, value: S) { - const path = getPath(tys, ty); + const path = getPath(tys, ty) if (path && path !== value) { decls.push({ path, code: [makeDocComment(ty.docs), ["export interface", getName(path)], value], - }); + }) } - return path || value; + return path || value } } diff --git a/codegen/utils.ts b/codegen/utils.ts index 9dbfbfad0..25899b89d 100644 --- a/codegen/utils.ts +++ b/codegen/utils.ts @@ -1,90 +1,90 @@ -import * as M from "../frame_metadata/mod.ts"; +import * as M from "../frame_metadata/mod.ts" -export type S = string | number | S[]; +export type S = string | number | S[] export namespace S { export function array(items: S[]): S { - return ["[", items.map((x) => [x, ","]), "]"]; + return ["[", items.map((x) => [x, ","]), "]"] } export function object( ...items: (readonly [doc: S, prop: S, val: S] | readonly [prop: S, val: S])[] ): S { - return ["{", items.map((x) => [x.slice(0, -1), ":", x.at(-1)!, ","]), "}"]; + return ["{", items.map((x) => [x.slice(0, -1), ":", x.at(-1)!, ","]), "}"] } export function string(value: string): S { - return JSON.stringify(value); + return JSON.stringify(value) } export function toString(value: S): string { - if (!(value instanceof Array)) return value.toString(); - const parts = value.map(S.toString); - return parts.map((x) => x.trim()).join(parts.some((x) => x.includes("\n")) ? "\n" : " ").trim(); + if (!(value instanceof Array)) return value.toString() + const parts = value.map(S.toString) + return parts.map((x) => x.trim()).join(parts.some((x) => x.includes("\n")) ? "\n" : " ").trim() } } -export type Decl = { path: string; code: S }; +export type Decl = { path: string; code: S } export function getPath(tys: M.Ty[], ty: M.Ty): string | null { - if (ty.type === "Struct" && ty.fields.length === 1 && ty.params.length) return null; - return _getName(ty); + if (ty.type === "Struct" && ty.fields.length === 1 && ty.params.length) return null + return _getName(ty) function _getName(ty: M.Ty): string | null { if (ty.type === "Primitive") { - return ty.kind; + return ty.kind } if (ty.type === "Compact") { - return null; + return null } - if (ty.path.at(-1) === "Era") return "Era"; - if (["Option", "Result", "Cow", "BTreeMap", "BTreeSet"].includes(ty.path[0]!)) return null; - const baseName = ty.path.join("."); - if (!baseName) return null; + if (ty.path.at(-1) === "Era") return "Era" + if (["Option", "Result", "Cow", "BTreeMap", "BTreeSet"].includes(ty.path[0]!)) return null + const baseName = ty.path.join(".") + if (!baseName) return null return baseName + ty.params.map((p, i) => { - if (p.ty === undefined) return ""; + if (p.ty === undefined) return "" if (tys.every((x) => x.path.join(".") !== baseName || x.params[i]!.ty === p.ty)) { - return ""; + return "" } - return ".$$" + (_getName(p.ty) ?? p.ty); - }).join(""); + return ".$$" + (_getName(p.ty) ?? 
p.ty) + }).join("") } } export function getName(path: string) { - return path.split(".").at(-1)!; + return path.split(".").at(-1)! } export function makeDocComment(docs: string[]) { - docs = docs.map((x) => x.replace(/^\s*\n\s*|\s*\n\s*$/, "").replace(/\s*\n\s*/g, " ")); - if (!docs.length) return ""; - if (docs.length === 1) return `/** ${docs[0]!.trim()} */\n`; - return `/**\n * ${docs.join("\n * ")}\n */`; + docs = docs.map((x) => x.replace(/^\s*\n\s*|\s*\n\s*$/, "").replace(/\s*\n\s*/g, " ")) + if (!docs.length) return "" + if (docs.length === 1) return `/** ${docs[0]!.trim()} */\n` + return `/**\n * ${docs.join("\n * ")}\n */` } export function getRawCodecPath(ty: M.Ty) { - return `_codec.$${ty.id}`; + return `_codec.$${ty.id}` } export function getCodecPath(tys: M.Ty[], ty: M.Ty) { if (ty.type === "Primitive") { - return ty.kind === "char" ? "$.str" : "$." + ty.kind; + return ty.kind === "char" ? "$.str" : "$." + ty.kind } - const path = getPath(tys, ty); - if (path === null) return getRawCodecPath(ty); - const parts = path.split("."); + const path = getPath(tys, ty) + if (path === null) return getRawCodecPath(ty) + const parts = path.split(".") return [ ...parts.slice(0, -1), "$" + parts.at(-1)![0]!.toLowerCase() + parts.at(-1)!.slice(1), - ].join("."); + ].join(".") } export function printDecls(decls: Decl[]) { - const namespaces: Record = {}; - const done: Decl[] = []; + const namespaces: Record = {} + const done: Decl[] = [] for (const { path, code } of decls) { if (path.includes(".")) { - const [ns, ...rest] = path.split("."); - (namespaces[ns!] ??= []).push({ path: rest.join("."), code }); + const [ns, ...rest] = path.split(".") + ;(namespaces[ns!] ??= []).push({ path: rest.join("."), code }) } else { - done.push({ path, code }); + done.push({ path, code }) } } for (const ns in namespaces) { @@ -95,7 +95,7 @@ export function printDecls(decls: Decl[]) { printDecls(namespaces[ns]!), "}", ], - }); + }) } // sort by path, _s first done.sort((a, b) => @@ -106,7 +106,7 @@ export function printDecls(decls: Decl[]) { : a.path > b.path ? 1 : 0 - ); + ) // Deduplicate -- metadata has redundant entries (e.g. pallet_collective::RawOrigin) - return [...new Set(done.map((x) => S.toString(x.code)))].join("\n"); + return [...new Set(done.map((x) => S.toString(x.code)))].join("\n") } diff --git a/compat/mod.ts b/compat/mod.ts index 626e76e43..5d64e843c 100644 --- a/compat/mod.ts +++ b/compat/mod.ts @@ -1 +1 @@ -export * from "./pjs.ts"; +export * from "./pjs.ts" diff --git a/compat/pjs.ts b/compat/pjs.ts index 2a67abe0a..688663efc 100644 --- a/compat/pjs.ts +++ b/compat/pjs.ts @@ -1,27 +1,27 @@ -import { KeyringPair } from "../deps/polkadot/keyring/types.ts"; -import { MultiAddress, Signature, Signer } from "../frame_metadata/Extrinsic.ts"; +import { KeyringPair } from "../deps/polkadot/keyring/types.ts" +import { MultiAddress, Signature, Signer } from "../frame_metadata/Extrinsic.ts" export function multiAddressFromKeypair(keypair: KeyringPair): MultiAddress { - return MultiAddress.fromId(keypair.publicKey); + return MultiAddress.fromId(keypair.publicKey) } export function signerFromKeypair(keypair: KeyringPair): Signer { const type = ((): Signature["type"] => { switch (keypair.type) { case "sr25519": { - return "Sr25519"; + return "Sr25519" } case "ed25519": { - return "Ed25519"; + return "Ed25519" } default: { // TODO - return null!; + return null! 
} } - })(); + })() return (message) => ({ type, value: keypair.sign(message), - }); + }) } diff --git a/deno.jsonc b/deno.jsonc index 1a31e277f..35d98de32 100644 --- a/deno.jsonc +++ b/deno.jsonc @@ -17,7 +17,8 @@ "no-empty-interface", "no-explicit-any", "no-namespace", - "no-empty" + "no-empty", + "no-extra-semi" ], "tags": ["recommended"] } diff --git a/deps/dprint.ts b/deps/dprint.ts index 3259ecf97..0911b2118 100644 --- a/deps/dprint.ts +++ b/deps/dprint.ts @@ -1,10 +1,10 @@ -export * from "https://deno.land/x/dprint@0.2.0/mod.ts"; -import { createStreaming } from "https://deno.land/x/dprint@0.2.0/mod.ts"; +export * from "https://deno.land/x/dprint@0.2.0/mod.ts" +import { createStreaming } from "https://deno.land/x/dprint@0.2.0/mod.ts" export const tsFormatter = await createStreaming( // check https://plugins.dprint.dev/ for latest plugin versions fetch("https://plugins.dprint.dev/typescript-0.71.2.wasm"), -); +) tsFormatter.setConfig({ indentWidth: 2, @@ -12,4 +12,4 @@ tsFormatter.setConfig({ }, { quoteProps: "asNeeded", "arrowFunction.useParentheses": "force", -}); +}) diff --git a/deps/polkadot/keyring.ts b/deps/polkadot/keyring.ts index 8eb52ad1f..41d4ebe14 100644 --- a/deps/polkadot/keyring.ts +++ b/deps/polkadot/keyring.ts @@ -1 +1 @@ -export * from "https://deno.land/x/polkadot@0.0.8/keyring/mod.ts"; +export * from "https://deno.land/x/polkadot@0.0.8/keyring/mod.ts" diff --git a/deps/polkadot/keyring/types.ts b/deps/polkadot/keyring/types.ts index a6aa6a359..085ea4143 100644 --- a/deps/polkadot/keyring/types.ts +++ b/deps/polkadot/keyring/types.ts @@ -1 +1 @@ -export * from "https://deno.land/x/polkadot@0.0.8/keyring/types.ts"; +export * from "https://deno.land/x/polkadot@0.0.8/keyring/types.ts" diff --git a/deps/polkadot/types.ts b/deps/polkadot/types.ts index a100396eb..0998cfd8d 100644 --- a/deps/polkadot/types.ts +++ b/deps/polkadot/types.ts @@ -1 +1 @@ -export * from "https://deno.land/x/polkadot@0.0.8/types/mod.ts"; +export * from "https://deno.land/x/polkadot@0.0.8/types/mod.ts" diff --git a/deps/polkadot/util-crypto.ts b/deps/polkadot/util-crypto.ts index 8994eb4bc..7e454be7f 100644 --- a/deps/polkadot/util-crypto.ts +++ b/deps/polkadot/util-crypto.ts @@ -1 +1 @@ -export * from "https://deno.land/x/polkadot@0.0.8/util-crypto/mod.ts"; +export * from "https://deno.land/x/polkadot@0.0.8/util-crypto/mod.ts" diff --git a/deps/scale.ts b/deps/scale.ts index ccb2a00ea..bef30a194 100644 --- a/deps/scale.ts +++ b/deps/scale.ts @@ -1 +1 @@ -export * from "https://deno.land/x/scale@v0.9.1/mod.ts"; +export * from "https://deno.land/x/scale@v0.9.1/mod.ts" diff --git a/deps/smoldot.ts b/deps/smoldot.ts index ab4d280d1..d72b5cb7e 100644 --- a/deps/smoldot.ts +++ b/deps/smoldot.ts @@ -1 +1 @@ -export * from "https://deno.land/x/smoldot@light-js-deno-v0.7.6/index-deno.js"; +export * from "https://deno.land/x/smoldot@light-js-deno-v0.7.6/index-deno.js" diff --git a/deps/smoldot/client.d.ts b/deps/smoldot/client.d.ts index a89da1966..749e6dbed 100644 --- a/deps/smoldot/client.d.ts +++ b/deps/smoldot/client.d.ts @@ -1 +1 @@ -export * from "https://deno.land/x/smoldot@light-js-deno-v0.7.3/client.d.ts"; +export * from "https://deno.land/x/smoldot@light-js-deno-v0.7.3/client.d.ts" diff --git a/deps/std/async.ts b/deps/std/async.ts index 6f028279e..36f914ea6 100644 --- a/deps/std/async.ts +++ b/deps/std/async.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.127.0/async/mod.ts"; +export * from "https://deno.land/std@0.127.0/async/mod.ts" diff --git a/deps/std/encoding/base58.ts 
b/deps/std/encoding/base58.ts index 9718d6b30..e94d0fdd6 100644 --- a/deps/std/encoding/base58.ts +++ b/deps/std/encoding/base58.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.154.0/encoding/base58.ts"; +export * from "https://deno.land/std@0.154.0/encoding/base58.ts" diff --git a/deps/std/encoding/hex.ts b/deps/std/encoding/hex.ts index a3fa1a30c..4c86806b8 100644 --- a/deps/std/encoding/hex.ts +++ b/deps/std/encoding/hex.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.127.0/encoding/hex.ts"; +export * from "https://deno.land/std@0.127.0/encoding/hex.ts" diff --git a/deps/std/flags.ts b/deps/std/flags.ts index 54d17b01f..54e3dc053 100644 --- a/deps/std/flags.ts +++ b/deps/std/flags.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.158.0/flags/mod.ts"; +export * from "https://deno.land/std@0.158.0/flags/mod.ts" diff --git a/deps/std/fs.ts b/deps/std/fs.ts index a7a6e27e8..13b55fcdc 100644 --- a/deps/std/fs.ts +++ b/deps/std/fs.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.127.0/fs/mod.ts"; +export * from "https://deno.land/std@0.127.0/fs/mod.ts" diff --git a/deps/std/path.ts b/deps/std/path.ts index 6948b2688..bf7351db8 100644 --- a/deps/std/path.ts +++ b/deps/std/path.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.127.0/path/mod.ts"; +export * from "https://deno.land/std@0.127.0/path/mod.ts" diff --git a/deps/std/testing/asserts.ts b/deps/std/testing/asserts.ts index 8ee9b33a7..ade947594 100644 --- a/deps/std/testing/asserts.ts +++ b/deps/std/testing/asserts.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.158.0/testing/asserts.ts"; +export * from "https://deno.land/std@0.158.0/testing/asserts.ts" diff --git a/deps/std/testing/snapshot.ts b/deps/std/testing/snapshot.ts index 0db92e1cd..32424c5c7 100644 --- a/deps/std/testing/snapshot.ts +++ b/deps/std/testing/snapshot.ts @@ -1 +1 @@ -export * from "https://deno.land/std@0.158.0/testing/snapshot.ts"; +export * from "https://deno.land/std@0.158.0/testing/snapshot.ts" diff --git a/deps/zones.ts b/deps/zones.ts index 41bd1d90a..9917fb704 100644 --- a/deps/zones.ts +++ b/deps/zones.ts @@ -1 +1 @@ -export * from "https://deno.land/x/zones@v0.1.0-beta.12/mod.ts"; +export * from "https://deno.land/x/zones@v0.1.0-beta.12/mod.ts" diff --git a/docs/Configs.md b/docs/Configs.md index 64aedd94e..d1a796ba6 100644 --- a/docs/Configs.md +++ b/docs/Configs.md @@ -3,13 +3,13 @@ Before interacting with a given chain, we must have a means of finding nodes of that chain. This means of discovery is called a "config." A config can also contain additional values and type information (more on this below). ```ts -import { config as polkadot } from "@capi/polkadot"; +import { config as polkadot } from "@capi/polkadot" ``` Let's use the Polkadot config to read some storage. ```ts -const result = await C.entry(polkadot, "Staking", "ActiveEra").read(); +const result = await C.entry(polkadot, "Staking", "ActiveEra").read() ``` ## Type Safety @@ -21,7 +21,7 @@ The static type of any config can describe accessible RPC server methods and FRA What happens if––in the example above––we accidentally misspell a junction of the storage key? We get an immediate type error. ```ts -const result = await C.entry(polkadot, "Stacking", "ActiveEra").read(); +const result = await C.entry(polkadot, "Stacking", "ActiveEra").read() // ~~~~~~~~~~ // ^ argument of type 'Stacking' is not assignable to parameter of type 'PolkadotPalletName'. 
``` @@ -31,7 +31,7 @@ const result = await C.entry(polkadot, "Stacking", "ActiveEra").read(); The same is true for RPC method availability. ```ts -const result = await C.rpcCall(myConfig, "nonexistent_method", []); +const result = await C.rpcCall(myConfig, "nonexistent_method", []) // ~~~~~~~~~~~~~~~~~~~~ // ^ argument of type 'nonexistent_method' is not assignable to parameter of type 'existent_method'. ``` @@ -58,7 +58,7 @@ The generated directory will contain a root `mod.ts`, which re-exports the value We can import and utilize these configs as we would from `capi/known`. ```ts -import { config } from "./my_configs/mod.ts"; +import { config } from "./my_configs/mod.ts" ``` ## Ecosystem Configs @@ -66,5 +66,5 @@ import { config } from "./my_configs/mod.ts"; Proprietors and communities of a given chain may want to take ownership of their configs. Although Capi's typegen encodes all possible constraints from the FRAME metadata, there are further constraints from which users may benefit. ```ts -import { config } from "https://deno.land/x/capi-xyz-chain/mod.ts"; +import { config } from "https://deno.land/x/capi-xyz-chain/mod.ts" ``` diff --git a/docs/Effects.md b/docs/Effects.md index 4a3bc5cbd..276f11af3 100644 --- a/docs/Effects.md +++ b/docs/Effects.md @@ -7,16 +7,16 @@ Let's say we want to write a program that produces a random number. If the numbe ```ts class GtPoint5Error extends Error { constructor(value: number) { - super(`The random number \`${value}\` is greater than \`.5\`.`); + super(`The random number \`${value}\` is greater than \`.5\`.`) } } function getRand(): number { - const rand = Math.random(); + const rand = Math.random() if (rand > .5) { - throw new GtPoint5Error(rand); + throw new GtPoint5Error(rand) } - return rand; + return rand } ``` @@ -65,11 +65,11 @@ function add< b: B, ): Result | Extract> { if (a instanceof Error) { - return a; + return a } else if (b instanceof Error) { - return b; + return b } - return ok(a.ok + b.ok); + return ok(a.ok + b.ok) } ``` @@ -78,23 +78,23 @@ Because the error types of `A` and `B` are generic, we can never truly handle th This is precisely what Capi's effect system––[Zones](https://github.com/paritytech/zones)––enables. ```ts -import * as Z from "zones"; +import * as Z from "zones" const rand = Z.call.fac(() => { - const rand = Math.random(); + const rand = Math.random() if (rand > .5) { - return new GtPoint5Error(rand); + return new GtPoint5Error(rand) } - return rand; -}); + return rand +}) const add = Z.call.fac((a: number, b: number) => { - return a + b; -}); + return a + b +}) -const root = add(rand(), 1); +const root = add(rand(), 1) -const result = Z.runtime().run(root); +const result = Z.runtime().run(root) ``` In this example `result` carries the type `number | GtPoint5Error`, which allows us to discriminate with ease. @@ -114,18 +114,18 @@ Although this may seem overly-complex for such a tiny amount of computation, it In the context of Capi, Effects are used to represent on-chain constructs without actually performing any computation until necessary. As showcased within this project's readme, we can create an effect that represents a key in a map, and then use that effect to represent its corresponding value in the map. This all occurs without any network interaction whatsoever. 
```ts -import * as C from "capi"; -import { system } from "./polkadot/frame.ts"; +import * as C from "capi" +import { system } from "./polkadot/frame.ts" -const key = system.account.keys.first; +const key = system.account.keys.first -const value = system.account.get(key); +const value = system.account.get(key) ``` We can compose atomic effects such as these to create complex, multichain interactions that abstract over common use cases. Meanwhile, the underlying effect system appropriately determines the optimal path to execute effects. ```ts -const result = await C.run(value); +const result = await C.run(value) ``` In this example `result` carries a type representing a union of all possible errors (such as `StorageDne`). @@ -139,25 +139,25 @@ We don't want to accidentally allocate all the JS thread's processing to a block Imagine you're forming a derived request, wherein `requestC` accepts the result of `requestA` and `requestB`. ```ts -const a = await requestA(); -const b = await requestB(); -const c = await requestC(a, b); +const a = await requestA() +const b = await requestB() +const c = await requestC(a, b) ``` In this case, we accidentally block on `requestA`, when we could execute it in parallel with `requestB`. ```ts -const a = requestA(); -const b = requestB(); -const c = await requestC(...await Promise.all([a, b])); +const a = requestA() +const b = requestB() +const c = await requestC(...await Promise.all([a, b])) ``` Or perhaps there are parts of our program which can produce repeat versions of a request. ```ts -const makeD = (value: number) => requestD(value); -makeD(Math.random()); -makeD(Math.random()); +const makeD = (value: number) => requestD(value) +makeD(Math.random()) +makeD(Math.random()) ``` If `Math.random()` miraculously returns `.25` twice, do we need to send the request a second time? If `requestD` is idempotent, then no. diff --git a/docs/Quick_Start.md b/docs/Quick_Start.md index 6ff437205..350922712 100644 --- a/docs/Quick_Start.md +++ b/docs/Quick_Start.md @@ -5,7 +5,7 @@ If you're using [Deno](https://deno.land/), import Capi via its [`denoland/x`](https://deno.land/x) URI. ```ts -import * as C from "https://deno.land/x/capi/mod.ts"; +import * as C from "https://deno.land/x/capi/mod.ts" ``` > Note: you may want to pin the version in the import specifier (`"https://deno.land/x/capi@x.x.x/mod.ts"`). @@ -19,7 +19,7 @@ npm install capi Then import as follows. ```ts -import * as C from "capi"; +import * as C from "capi" ``` > The `capi` NPM package contains both ESM & CJS formats, alongside corresponding type definitions. @@ -47,10 +47,10 @@ deno run -A -r https://deno.land/x/capi/codegen.ts \ ## Read the Latest Block ```ts -import * as C from "capi"; -import { block } from "./polkadot/core.ts"; +import * as C from "capi" +import { block } from "./polkadot/core.ts" -const block = await C.run(block.latest); +const block = await C.run(block.latest) ``` ## Reading From Storage @@ -58,14 +58,14 @@ const block = await C.run(block.latest); Let's read from on-chain storage. 
```ts -import * as C from "capi"; -import { system } from "./polkadot/frame.ts"; +import * as C from "capi" +import { system } from "./polkadot/frame.ts" // bind to the last inserted key -const key = system.account.keys.first; +const key = system.account.keys.first // bind to the corresponding value -const value = C.run(system.account.get(key)); +const value = C.run(system.account.get(key)) ``` ## Transferring Some Funds @@ -73,10 +73,10 @@ const value = C.run(system.account.get(key)); In the following example, we create and sign an extrinsic that calls the Balance pallet's transfer method. ```ts -import * as C from "capi"; -import { balances } from "./polkadot/frame.ts"; +import * as C from "capi" +import { balances } from "./polkadot/frame.ts" -declare const aliceSigner: C.Signer; +declare const aliceSigner: C.Signer const tx = balances.transfer({ value: 12345n, @@ -84,9 +84,9 @@ const tx = balances.transfer({ }) .signed(aliceSigner) .sent - .finalized; + .finalized -const result = await C.run(tx); +const result = await C.run(tx) ``` ### Observe Transfer Events diff --git a/docs/Reading.md b/docs/Reading.md index 127e4db3e..3beaa5909 100644 --- a/docs/Reading.md +++ b/docs/Reading.md @@ -1,25 +1,25 @@ # Reading ```ts -import * as frame from "./generated/frame.ts"; +import * as frame from "./generated/frame.ts" ``` ## Items ```ts -const result = await chain.timestamp.now; +const result = await chain.timestamp.now ``` ## Maps ```ts -const result = await chain.system.account.get(PUBLIC_KEY); +const result = await chain.system.account.get(PUBLIC_KEY) ``` ## NMaps ```ts -const result = await chain.staking.nominatorSlashInEra.get(123, PUBLIC_KEY); +const result = await chain.staking.nominatorSlashInEra.get(123, PUBLIC_KEY) ``` ### Keys diff --git a/docs/Testing.md b/docs/Testing.md index ce4da3227..42ba9c2f7 100644 --- a/docs/Testing.md +++ b/docs/Testing.md @@ -17,10 +17,10 @@ Let's say you have the following usage. `example.ts` ```ts -import * as C from "capi"; -import { system } from "./polkadot/frame.ts"; +import * as C from "capi" +import { system } from "./polkadot/frame.ts" -const result = await C.run(system.events); +const result = await C.run(system.events) ``` Create an import map. diff --git a/docs/Types.md b/docs/Types.md index 5c54a842e..5e0efa35e 100644 --- a/docs/Types.md +++ b/docs/Types.md @@ -15,7 +15,7 @@ Every FRAME chain exposes metadata about its types and properties. This metadata As always, our first step is to bring Capi into scope. ```ts -import * as C from "https://deno.land/x/capi/mod.ts"; +import * as C from "https://deno.land/x/capi/mod.ts" ``` Now let's fetch the metadata. @@ -23,7 +23,7 @@ Now let's fetch the metadata. ```ts // ... -const metadata = await C.chain(CHAIN_PROXY_WS_URL).metadata.read(); +const metadata = await C.chain(CHAIN_PROXY_WS_URL).metadata.read() ``` If we index into `metadata.pallets`, we'll see a list of all pallet metadata. Each element of this list contains a complete description of the given pallet's storage entries, as well as constants, callables (for creating extrinsics), errors and events. Some fields––such as a pallet's `call` field––point to an index in `metadata.tys`, which contains a complete description of the chain's type-level context. @@ -33,8 +33,8 @@ Let's say we want to learn about the types associated with the `Balances` pallet ```ts // ... 
-const balancesPallet = metadata.pallets.find((pallet) => pallet.name === "Balances"); -const accountsStorage = balancesPallet?.storage?.entries.find((entry) => entry.name === "Account"); +const balancesPallet = metadata.pallets.find((pallet) => pallet.name === "Balances") +const accountsStorage = balancesPallet?.storage?.entries.find((entry) => entry.name === "Account") ``` On most chains, `accountsStorage` will look similar to the following. @@ -61,7 +61,7 @@ On most chains, `accountsStorage` will look similar to the following. "", " ```nocompile", " impl pallet_balances::Config for Runtime {", - " type AccountStore = StorageMapShim, frame_system::Provider, Acco...", + " type AccountStore = StorageMapShim, frame_system::Provider, ...", " }", " ```", "", @@ -90,7 +90,7 @@ On most chains, `accountsStorage` will look similar to the following. Let's index into `metadata.tys` with the specified key (`0`). ```ts -const keyType = metadata.tys[accountsStorage.key]; +const keyType = metadata.tys[accountsStorage.key] ``` `keyType` should evaluate to something along the lines of: @@ -112,7 +112,7 @@ If we index again into `metadata.tys` with `1` (as specified in the first field) namespace sp_core { export namespace crypto { // Note: `Uint8Array` lengths are untyped in TypeScript - export type AccountId32 = Uint8Array; + export type AccountId32 = Uint8Array } } ``` @@ -120,7 +120,7 @@ namespace sp_core { We can instantiate this as we would any other JS-land value. ```ts -const accountId32 = new Uint8Array(RAW_ADDR_BYTES); +const accountId32 = new Uint8Array(RAW_ADDR_BYTES) ``` We'll cover the TypeScript <-> Rust conversions more in depth [in a later section](#typescript---rust). @@ -131,13 +131,13 @@ Let's now utilize our `accountId32` definition to read a balance. // ... // Which storage map? -const accounts = C.pallet("Balances").storageMap("Account"); +const accounts = C.pallet("Balances").storageMap("Account") // Which key? -const key = accounts.key(accountId32); +const key = accounts.key(accountId32) // Read the value. -const account = await accounts.get(key).read(); +const account = await accounts.get(key).read() ``` What value does this retrieve? How can we deduce this from the FRAME metadata? @@ -145,7 +145,7 @@ What value does this retrieve? How can we deduce this from the FRAME metadata? We can do the same as before, but this time index into `metadata.tys` with the `accountsStorage.value`. ```ts -const valueType = metadata.tys[accountsStorage.value]; +const valueType = metadata.tys[accountsStorage.value] ``` This should give us something along the following lines: @@ -171,10 +171,10 @@ When we follow type `6` (metadata.tys[6]), we see that it represents a `u128`. 
I ```ts namespace pallet_balances { export interface AccountData { - free: bigint; - reserved: bigint; - misc_frozen: bigint; - fee_frozen: bigint; + free: bigint + reserved: bigint + misc_frozen: bigint + fee_frozen: bigint } } ``` @@ -226,31 +226,31 @@ enum E1 { ```ts -type T0 = null; -type T1 = A; -type T2 = [A, B]; -type T3 = Uint8Array; -type T4 = Uint8Array & { length: n }; -type T5 = A[]; -type T6 = A[] & { length: n }; -type T7 = A | undefined; -type T8 = O | ChainError; - -type S0 = null; -type S1 = A; -type S2 = [A, B]; -type S3 = { a: A }; +type T0 = null +type T1 = A +type T2 = [A, B] +type T3 = Uint8Array +type T4 = Uint8Array & { length: n } +type T5 = A[] +type T6 = A[] & { length: n } +type T7 = A | undefined +type T8 = O | ChainError + +type S0 = null +type S1 = A +type S2 = [A, B] +type S3 = { a: A } type E0 = | "A" | "B" - | "C"; + | "C" type E1 = | { type: "A" } | { type: "B"; value: C } | { type: "D"; value: [E, F] } - | { type: "G"; h: H }; + | { type: "G"; h: H } ``` @@ -275,14 +275,14 @@ There are several ways to unwrap the inner value. The recommended path is to fir if (account instanceof Error) { // Handle errors here. } else { - account.value; // `unknown` + account.value // `unknown` } ``` In situations where convenience is a priority (such as these very docs), we can simply call the `unwrap` method of the result. ```ts -const value = account.unwrap(); +const value = account.unwrap() ``` TODO diff --git a/dprint.json b/dprint.json index 4b6971d0b..9dca9e2f7 100644 --- a/dprint.json +++ b/dprint.json @@ -4,7 +4,8 @@ "lineWidth": 100, "typescript": { "quoteProps": "asNeeded", - "arrowFunction.useParentheses": "force" + "arrowFunction.useParentheses": "force", + "semiColons": "asi" }, "includes": ["**.{dockerfile,json,md,ts}"], "excludes": ["frame_metadata/raw_erc20_metadata.json", "target"], diff --git a/effects/blockWatch.ts b/effects/blockWatch.ts index 86fc4f495..4bd135444 100644 --- a/effects/blockWatch.ts +++ b/effects/blockWatch.ts @@ -1,9 +1,9 @@ -import * as Z from "../deps/zones.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { chain } from "./rpc_known_methods.ts"; +import * as Z from "../deps/zones.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { chain } from "./rpc_known_methods.ts" -const k0_ = Symbol(); +const k0_ = Symbol() export function blockWatch>(client: Client) { return < @@ -13,21 +13,21 @@ export function blockWatch>(client: Client) { .ls(listener, Z.env) .next(([listener, env]) => { return async function(this: rpc.ClientSubscribeContext, header: rpc.known.Header) { - const blockHash = chain.getBlockHash(client)(header.number); - const block = await chain.getBlock(client)(blockHash).bind(env)(); + const blockHash = chain.getBlockHash(client)(header.number) + const block = await chain.getBlock(client)(blockHash).bind(env)() // TODO: return error with `this.stop` once implemented - if (block instanceof Error) throw block; - listener.apply({ ...this, env }, [block]); - }; - }, k0_); - const subscriptionId = chain.subscribeNewHeads(client)([], listenerMapped); + if (block instanceof Error) throw block + listener.apply({ ...this, env }, [block]) + } + }, k0_) + const subscriptionId = chain.subscribeNewHeads(client)([], listenerMapped) return chain .unsubscribeNewHeads(client)(subscriptionId) - .zoned("BlockWatch"); - }; + .zoned("BlockWatch") + } } // TODO: generalize creating watch effects + accessing context + halting with a value export interface 
BlockWatchListenerContext extends rpc.ClientSubscribeContext { - env: Z.Env; + env: Z.Env } diff --git a/effects/const.ts b/effects/const.ts index 55808ff78..2c3df2d3f 100644 --- a/effects/const.ts +++ b/effects/const.ts @@ -1,8 +1,8 @@ -import * as Z from "../deps/zones.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { constMetadata, metadata, palletMetadata } from "./metadata.ts"; -import * as scale from "./scale.ts"; +import * as Z from "../deps/zones.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { constMetadata, metadata, palletMetadata } from "./metadata.ts" +import * as scale from "./scale.ts" export function const_>(client: Client) { return < @@ -14,18 +14,18 @@ export function const_>(client: Client) { constName: ConstName, ...[blockHash]: [...Rest] ) => { - const metadata_ = metadata(client)(blockHash); - const deriveCodec_ = scale.deriveCodec(metadata_); - const palletMetadata_ = palletMetadata(metadata_, palletName); - const constMetadata_ = constMetadata(palletMetadata_, constName); - const entryValueTypeI = constMetadata_.access("ty").access("id"); - const constValue = constMetadata_.access("value"); - const $const = scale.codec(deriveCodec_, entryValueTypeI); - return scale.scaleDecoded($const, constValue, "value").zoned("Const"); - }; + const metadata_ = metadata(client)(blockHash) + const deriveCodec_ = scale.deriveCodec(metadata_) + const palletMetadata_ = palletMetadata(metadata_, palletName) + const constMetadata_ = constMetadata(palletMetadata_, constName) + const entryValueTypeI = constMetadata_.access("ty").access("id") + const constValue = constMetadata_.access("value") + const $const = scale.codec(deriveCodec_, entryValueTypeI) + return scale.scaleDecoded($const, constValue, "value").zoned("Const") + } } Object.defineProperty(const_, "name", { value: "const", writable: false, -}); -export { const_ as const }; +}) +export { const_ as const } diff --git a/effects/entryRead.ts b/effects/entryRead.ts index 7c4c74463..bf8b30faa 100644 --- a/effects/entryRead.ts +++ b/effects/entryRead.ts @@ -1,9 +1,9 @@ -import * as Z from "../deps/zones.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { entryMetadata, metadata, palletMetadata } from "./metadata.ts"; -import { state } from "./rpc_known_methods.ts"; -import * as scale from "./scale.ts"; +import * as Z from "../deps/zones.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { entryMetadata, metadata, palletMetadata } from "./metadata.ts" +import { state } from "./rpc_known_methods.ts" +import * as scale from "./scale.ts" export function entryRead>(client: Client) { return < @@ -17,16 +17,16 @@ export function entryRead>(client: Client) { keys: [...Keys], ...[blockHash]: [...Rest] ) => { - const metadata_ = metadata(client)(blockHash); - const deriveCodec_ = scale.deriveCodec(metadata_); - const palletMetadata_ = palletMetadata(metadata_, palletName); - const entryMetadata_ = entryMetadata(palletMetadata_, entryName); - const $storageKey_ = scale.$storageKey(deriveCodec_, palletMetadata_, entryMetadata_); - const storageKey = scale.scaleEncoded($storageKey_, Z.ls(...keys)).next(U.hex.encode); - const storageBytesHex = state.getStorage(client)(storageKey, blockHash); - const storageBytes = storageBytesHex.next(U.hex.decode); - const entryValueTypeI = entryMetadata_.access("value"); - const $entry = scale.codec(deriveCodec_, entryValueTypeI); - return 
scale.scaleDecoded($entry, storageBytes, "value").zoned("EntryRead"); - }; + const metadata_ = metadata(client)(blockHash) + const deriveCodec_ = scale.deriveCodec(metadata_) + const palletMetadata_ = palletMetadata(metadata_, palletName) + const entryMetadata_ = entryMetadata(palletMetadata_, entryName) + const $storageKey_ = scale.$storageKey(deriveCodec_, palletMetadata_, entryMetadata_) + const storageKey = scale.scaleEncoded($storageKey_, Z.ls(...keys)).next(U.hex.encode) + const storageBytesHex = state.getStorage(client)(storageKey, blockHash) + const storageBytes = storageBytesHex.next(U.hex.decode) + const entryValueTypeI = entryMetadata_.access("value") + const $entry = scale.codec(deriveCodec_, entryValueTypeI) + return scale.scaleDecoded($entry, storageBytes, "value").zoned("EntryRead") + } } diff --git a/effects/entryWatch.ts b/effects/entryWatch.ts index ff6f5b22e..69961cf34 100644 --- a/effects/entryWatch.ts +++ b/effects/entryWatch.ts @@ -1,13 +1,13 @@ -import * as Z from "../deps/zones.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { entryMetadata, metadata, palletMetadata } from "./metadata.ts"; -import { state } from "./rpc_known_methods.ts"; -import * as scale from "./scale.ts"; +import * as Z from "../deps/zones.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { entryMetadata, metadata, palletMetadata } from "./metadata.ts" +import { state } from "./rpc_known_methods.ts" +import * as scale from "./scale.ts" -export type WatchEntryEvent = [key?: unknown, value?: unknown]; +export type WatchEntryEvent = [key?: unknown, value?: unknown] -const k0_ = Symbol(); +const k0_ = Symbol() export function entryWatch>(client: Client) { return < @@ -20,17 +20,17 @@ export function entryWatch>(client: Client) { keys: Keys, listener: U.Listener, ) => { - const metadata_ = metadata(client)(); - const deriveCodec_ = scale.deriveCodec(metadata_); - const palletMetadata_ = palletMetadata(metadata_, palletName); - const entryMetadata_ = entryMetadata(palletMetadata_, entryName); - const $storageKey_ = scale.$storageKey(deriveCodec_, palletMetadata_, entryMetadata_); - const entryValueTypeI = entryMetadata_.access("value"); - const $entry = scale.codec(deriveCodec_, entryValueTypeI); + const metadata_ = metadata(client)() + const deriveCodec_ = scale.deriveCodec(metadata_) + const palletMetadata_ = palletMetadata(metadata_, palletName) + const entryMetadata_ = entryMetadata(palletMetadata_, entryName) + const $storageKey_ = scale.$storageKey(deriveCodec_, palletMetadata_, entryMetadata_) + const entryValueTypeI = entryMetadata_.access("value") + const $entry = scale.codec(deriveCodec_, entryValueTypeI) const storageKeys = scale .scaleEncoded($storageKey_, keys.length ? [keys] : []) .next(U.hex.encode) - .next(U.tuple); + .next(U.tuple) const listenerMapped = Z.ls($entry, listener).next(([$entry, listener]) => { return function listenerMapped( this: rpc.ClientSubscribeContext, @@ -39,17 +39,17 @@ export function entryWatch>(client: Client) { // TODO: in some cases there might be keys to decode // key ? $storageKey.decode(U.hex.decode(key)) : undefined const getKey = (key: rpc.known.Hex) => { - return key; - }; + return key + } const changes: WatchEntryEvent[] = changeset.changes.map(([key, val]) => { - return [getKey(key), val ? 
$entry.decode(U.hex.decode(val)) : undefined]; - }); - listener.apply(this, [changes]); - }; - }, k0_); - const subscriptionId = state.subscribeStorage(client)([storageKeys], listenerMapped); + return [getKey(key), val ? $entry.decode(U.hex.decode(val)) : undefined] + }) + listener.apply(this, [changes]) + } + }, k0_) + const subscriptionId = state.subscribeStorage(client)([storageKeys], listenerMapped) return state .unsubscribeStorage(client)(subscriptionId) - .zoned("EntryWatch"); - }; + .zoned("EntryWatch") + } } diff --git a/effects/events.ts b/effects/events.ts index 5319b5646..6bb3be7c8 100644 --- a/effects/events.ts +++ b/effects/events.ts @@ -1,35 +1,35 @@ -import * as Z from "../deps/zones.ts"; -import { known } from "../rpc/mod.ts"; -import { entryRead } from "./entryRead.ts"; -import { SignedExtrinsic } from "./extrinsic.ts"; -import { chain } from "./rpc_known_methods.ts"; +import * as Z from "../deps/zones.ts" +import { known } from "../rpc/mod.ts" +import { entryRead } from "./entryRead.ts" +import { SignedExtrinsic } from "./extrinsic.ts" +import { chain } from "./rpc_known_methods.ts" -const k1_ = Symbol(); -const k2_ = Symbol(); +const k1_ = Symbol() +const k2_ = Symbol() // TODO: attach this to `Extrinsic`? export function events>( extrinsic: Extrinsic, finalizedHash: FinalizedHash, ) { - const client = extrinsic.client as Extrinsic["client"]; + const client = extrinsic.client as Extrinsic["client"] const extrinsics = chain .getBlock(client)(finalizedHash) .access("block") - .access("extrinsics"); + .access("extrinsics") const idx = Z .ls(extrinsics, extrinsic.extrinsicHex as Extrinsic["extrinsicHex"]) .next(([extrinsics, extrinsicHex]) => { - return extrinsics.indexOf(extrinsicHex); - }, k1_); + return extrinsics.indexOf(extrinsicHex) + }, k1_) const events = entryRead(client)("System", "Events", [], finalizedHash) .access("value") - .as<{ phase: { value: number } }[]>(); + .as<{ phase: { value: number } }[]>() return Z .ls(idx, events) .next(([idx, events]) => { return events.filter((event) => { - return event.phase.value === idx; - }); - }, k2_); + return event.phase.value === idx + }) + }, k2_) } diff --git a/effects/extrinsic.test.ts b/effects/extrinsic.test.ts index c96442efe..b500b85a0 100644 --- a/effects/extrinsic.test.ts +++ b/effects/extrinsic.test.ts @@ -1,10 +1,10 @@ -import * as compat from "../compat/mod.ts"; -import { KeyringPair } from "../deps/polkadot/keyring/types.ts"; -import * as A from "../deps/std/testing/asserts.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; -import { entryRead } from "./entryRead.ts"; -import { CallData, extrinsic } from "./extrinsic.ts"; +import * as compat from "../compat/mod.ts" +import { KeyringPair } from "../deps/polkadot/keyring/types.ts" +import * as A from "../deps/std/testing/asserts.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" +import { entryRead } from "./entryRead.ts" +import { CallData, extrinsic } from "./extrinsic.ts" Deno.test({ name: "Balances.transfer", @@ -19,22 +19,22 @@ Deno.test({ dest: compat.multiAddressFromKeypair(T.bob), }, orderExpectation: ["ready", "inBlock", "finalized"], - }); - }); + }) + }) await ctx.step({ name: "account balance updated", fn: async () => { - const state = await entryRead(T.westend)("System", "Account", [T.bob.publicKey]).run(); + const state = await entryRead(T.westend)("System", "Account", [T.bob.publicKey]).run() A.assertObjectMatch(state, { value: { data: { free: 10000000000012345n }, }, - }); + }) 
}, - }); + }) }, -}); +}) Deno.test({ name: "Treasury.propose_spend", @@ -49,10 +49,10 @@ Deno.test({ beneficiary: compat.multiAddressFromKeypair(T.bob), }, orderExpectation: ["ready", "inBlock", "finalized"], - }); - }); + }) + }) }, -}); +}) Deno.test({ name: "Democracy.propose", @@ -72,14 +72,14 @@ Deno.test({ value: 2000000000000n, }, orderExpectation: ["ready", "inBlock", "finalized"], - }); - }); + }) + }) }, -}); +}) interface AssertExtrinsicStatusOrderProps extends CallData { - orderExpectation: T.extrinsic.StatusOrderExpectation; - keypair: KeyringPair; + orderExpectation: T.extrinsic.StatusOrderExpectation + keypair: KeyringPair } export async function assertExtrinsicStatusOrder({ @@ -95,6 +95,6 @@ export async function assertExtrinsicStatusOrder({ }) .signed(compat.signerFromKeypair(keypair)), ).run(), - ); - T.extrinsic.assertStatusOrder(extrinsicEvents, orderExpectation); + ) + T.extrinsic.assertStatusOrder(extrinsicEvents, orderExpectation) } diff --git a/effects/extrinsic.ts b/effects/extrinsic.ts index 06bfddf61..004ea77dd 100644 --- a/effects/extrinsic.ts +++ b/effects/extrinsic.ts @@ -1,34 +1,34 @@ -import { unimplemented } from "../deps/std/testing/asserts.ts"; -import * as Z from "../deps/zones.ts"; -import * as M from "../frame_metadata/mod.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as ss58 from "../ss58/mod.ts"; -import * as U from "../util/mod.ts"; -import { const as const_ } from "./const.ts"; -import { metadata } from "./metadata.ts"; -import { author, chain, system } from "./rpc_known_methods.ts"; -import * as scale from "./scale.ts"; +import { unimplemented } from "../deps/std/testing/asserts.ts" +import * as Z from "../deps/zones.ts" +import * as M from "../frame_metadata/mod.ts" +import * as rpc from "../rpc/mod.ts" +import * as ss58 from "../ss58/mod.ts" +import * as U from "../util/mod.ts" +import { const as const_ } from "./const.ts" +import { metadata } from "./metadata.ts" +import { author, chain, system } from "./rpc_known_methods.ts" +import * as scale from "./scale.ts" -const k0_ = Symbol(); +const k0_ = Symbol() export interface CallData { - palletName: string; - methodName: string; - args: Record; + palletName: string + methodName: string + args: Record } export interface ExtrinsicProps extends CallData { - sender: M.MultiAddress; - checkpoint?: U.HexHash; - mortality?: [period: bigint, phase: bigint]; - nonce?: string; - tip?: bigint; + sender: M.MultiAddress + checkpoint?: U.HexHash + mortality?: [period: bigint, phase: bigint] + nonce?: string + tip?: bigint } export function extrinsic>(client: Client) { return >(props: Props): Extrinsic => { - return new Extrinsic(client, props); - }; + return new Extrinsic(client, props) + } } export class Extrinsic< @@ -41,7 +41,7 @@ export class Extrinsic< ) {} signed>(sign: Sign): SignedExtrinsic { - return new SignedExtrinsic(this.client, this.props, sign); + return new SignedExtrinsic(this.client, this.props, sign) } } @@ -50,70 +50,70 @@ export class SignedExtrinsic< Props extends Z.Rec$ = Z.Rec$, Sign extends Z.$ = Z.$, > { - client; - props; - sign; - extrinsicBytes; - extrinsicHex; - extrinsicDecoded; + client + props + sign + extrinsicBytes + extrinsicHex + extrinsicDecoded constructor( client: Client, props: Props, sign: Sign, ) { - this.client = client as Client; - this.props = props as Z.Rec$Access; - this.sign = sign as Sign; + this.client = client as Client + this.props = props as Z.Rec$Access + this.sign = sign as Sign const addrPrefix = const_(this.client)("System", "SS58Prefix") 
.access("value") - .as(); - const $extrinsic_ = $extrinsic(this.client, this.sign); + .as() + const $extrinsic_ = $extrinsic(this.client, this.sign) const versions = const_(this.client)("System", "Version") - .access("value"); + .access("value") const specVersion = versions - .access("spec_version").as(); + .access("spec_version").as() const transactionVersion = versions - .access("transaction_version").as(); + .access("transaction_version").as() // TODO: create match effect in zones and use here // TODO: MultiAddress conversion utils const senderSs58 = Z.ls(addrPrefix, this.props.sender).next(([addrPrefix, sender]) => { switch (sender.type) { case "Id": { - return ss58.encode(addrPrefix, sender.value); + return ss58.encode(addrPrefix, sender.value) } default: { - unimplemented(); + unimplemented() } } - }, k0_); - const nonce = system.accountNextIndex(this.client)(senderSs58); - const genesisHashBytes = chain.getBlockHash(this.client)(0); - const genesisHash = genesisHashBytes.next(U.hex.decode); + }, k0_) + const nonce = system.accountNextIndex(this.client)(senderSs58) + const genesisHashBytes = chain.getBlockHash(this.client)(0) + const genesisHash = genesisHashBytes.next(U.hex.decode) const checkpointHash = this.props.checkpoint ? Z.option(this.props.checkpoint, U.hex.decode) - : genesisHash; + : genesisHash const mortality = Z .lift(this.props.mortality) .next((mortality) => { return mortality ? M.era.mortal(mortality[0], mortality[1]) - : M.era.immortal; - }); - const extra = Z.ls(mortality, nonce, this.props.tip || 0); - const additional = Z.ls(specVersion, transactionVersion, checkpointHash, genesisHash); - const signature = Z.rec({ address: this.props.sender, extra, additional }); + : M.era.immortal + }) + const extra = Z.ls(mortality, nonce, this.props.tip || 0) + const additional = Z.ls(specVersion, transactionVersion, checkpointHash, genesisHash) + const signature = Z.rec({ address: this.props.sender, extra, additional }) const $extrinsicProps = Z.rec({ protocolVersion: 4, palletName: this.props.palletName, methodName: this.props.methodName, args: this.props.args, signature, - }); - this.extrinsicBytes = scale.scaleEncoded($extrinsic_, $extrinsicProps, true); - this.extrinsicHex = this.extrinsicBytes.next(U.hex.encodePrefixed); - this.extrinsicDecoded = scale.scaleDecoded($extrinsic_, this.extrinsicBytes, "extrinsic"); + }) + this.extrinsicBytes = scale.scaleEncoded($extrinsic_, $extrinsicProps, true) + this.extrinsicHex = this.extrinsicBytes.next(U.hex.encodePrefixed) + this.extrinsicDecoded = scale.scaleDecoded($extrinsic_, this.extrinsicBytes, "extrinsic") } watch>>( @@ -122,13 +122,13 @@ export class SignedExtrinsic< const subscriptionId = author.submitAndWatchExtrinsic(this.client)( [this.extrinsicHex], listener, - ); + ) return author.unwatchExtrinsic(this.client)(subscriptionId) - .zoned("ExtrinsicWatch"); + .zoned("ExtrinsicWatch") } get sent() { - return author.submitExtrinsic(this.client)(this.extrinsicHex); + return author.submitExtrinsic(this.client)(this.extrinsicHex) } } @@ -143,18 +143,18 @@ export function extrinsicsDecoded< return Z .ls($extrinsic(client), hexes) .next(([$extrinsic, hexes]) => { - return hexes.map((hex) => $extrinsic.decode(U.hex.decode(hex))); - }); + return hexes.map((hex) => $extrinsic.decode(U.hex.decode(hex))) + }) } function $extrinsic< Client extends Z.$ = Z.$, Rest extends [sign?: Z.$] = [sign?: Z.$], >(client: Client, ...[sign]: Rest) { - const metadata_ = metadata(client)(); - const deriveCodec_ = scale.deriveCodec(metadata_); + const 
metadata_ = metadata(client)() + const deriveCodec_ = scale.deriveCodec(metadata_) const addrPrefix = const_(client)("System", "SS58Prefix") .access("value") - .as(); - return scale.$extrinsic(deriveCodec_, metadata_, sign!, addrPrefix); + .as() + return scale.$extrinsic(deriveCodec_, metadata_, sign!, addrPrefix) } diff --git a/effects/keyPageRead.ts b/effects/keyPageRead.ts index ef64d275c..904f8f5a5 100644 --- a/effects/keyPageRead.ts +++ b/effects/keyPageRead.ts @@ -1,11 +1,11 @@ -import * as Z from "../deps/zones.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { mapMetadata, metadata, palletMetadata } from "./metadata.ts"; -import { state } from "./rpc_known_methods.ts"; -import * as scale from "./scale.ts"; +import * as Z from "../deps/zones.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { mapMetadata, metadata, palletMetadata } from "./metadata.ts" +import { state } from "./rpc_known_methods.ts" +import * as scale from "./scale.ts" -const k0_ = Symbol(); +const k0_ = Symbol() export function keyPageRead>(client: Client) { return < @@ -21,26 +21,26 @@ export function keyPageRead>(client: Client) { partialKey: [...PartialKey], ...[start, blockHash]: [...Rest] ) => { - const metadata_ = metadata(client)(blockHash as Rest[1]); - const deriveCodec_ = scale.deriveCodec(metadata_); - const palletMetadata_ = palletMetadata(metadata_, palletName); - const entryMetadata_ = mapMetadata(palletMetadata_, entryName); - const $storageKey_ = scale.$storageKey(deriveCodec_, palletMetadata_, entryMetadata_); - const storageKey = scale.scaleEncoded($storageKey_, Z.ls(...partialKey)).next(U.hex.encode); - const startKey = start ? scale.scaleEncoded($storageKey_, []).next(U.hex.encode) : undefined; + const metadata_ = metadata(client)(blockHash as Rest[1]) + const deriveCodec_ = scale.deriveCodec(metadata_) + const palletMetadata_ = palletMetadata(metadata_, palletName) + const entryMetadata_ = mapMetadata(palletMetadata_, entryName) + const $storageKey_ = scale.$storageKey(deriveCodec_, palletMetadata_, entryMetadata_) + const storageKey = scale.scaleEncoded($storageKey_, Z.ls(...partialKey)).next(U.hex.encode) + const startKey = start ? 
scale.scaleEncoded($storageKey_, []).next(U.hex.encode) : undefined const keysEncoded = state.getKeysPaged(client)( storageKey, count, startKey, blockHash as Rest[1], - ); + ) return Z .ls($storageKey_, keysEncoded) .next(([$key, keysEncoded]) => { return keysEncoded.map((keyEncoded: U.Hex) => { - return $key.decode(U.hex.decode(keyEncoded)); - }); + return $key.decode(U.hex.decode(keyEncoded)) + }) }, k0_) - .zoned("KeyPageRead"); - }; + .zoned("KeyPageRead") + } } diff --git a/effects/metadata.ts b/effects/metadata.ts index 6c7f454d8..6eb75c6a0 100644 --- a/effects/metadata.ts +++ b/effects/metadata.ts @@ -1,15 +1,15 @@ -import * as $ from "../deps/scale.ts"; -import * as Z from "../deps/zones.ts"; -import * as M from "../frame_metadata/mod.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { state } from "./rpc_known_methods.ts"; +import * as $ from "../deps/scale.ts" +import * as Z from "../deps/zones.ts" +import * as M from "../frame_metadata/mod.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { state } from "./rpc_known_methods.ts" -const k0_ = Symbol(); -const k1_ = Symbol(); -const k2_ = Symbol(); -const k3_ = Symbol(); -const k4_ = Symbol(); +const k0_ = Symbol() +const k1_ = Symbol() +const k2_ = Symbol() +const k3_ = Symbol() +const k4_ = Symbol() // TODO: callable object so that one doesn't need the extra parens when not specifying block hash? export function metadata>(client: Client) { @@ -18,13 +18,13 @@ export function metadata>(client: Client) { .getMetadata(client)(blockHash) .next((encoded) => { try { - return M.fromPrefixedHex(encoded); + return M.fromPrefixedHex(encoded) } catch (e) { - return e as $.ScaleError; + return e as $.ScaleError } }, k0_) - .zoned("Metadata"); - }; + .zoned("Metadata") + } } export function palletMetadata, PalletName extends Z.$>( @@ -34,9 +34,9 @@ export function palletMetadata, PalletName exte return Z .ls(metadata, palletName) .next(([metadata, palletName]) => { - return M.getPallet(metadata, palletName); + return M.getPallet(metadata, palletName) }, k1_) - .zoned("PalletMetadata"); + .zoned("PalletMetadata") } export function entryMetadata, EntryName extends Z.$>( @@ -46,9 +46,9 @@ export function entryMetadata, EntryName ex return Z .ls(palletMetadata, entryName) .next(([palletMetadata, entryName]) => { - return M.getEntry(palletMetadata, entryName); + return M.getEntry(palletMetadata, entryName) }, k2_) - .zoned("EntryMetadata"); + .zoned("EntryMetadata") } export function constMetadata< @@ -61,9 +61,9 @@ export function constMetadata< return Z .ls(palletMetadata, constName) .next(([palletMetadata, constName]) => { - return M.getConst(palletMetadata, constName); + return M.getConst(palletMetadata, constName) }, k3_) - .zoned("ConstMetadata"); + .zoned("ConstMetadata") } export function mapMetadata, EntryName extends Z.$>( @@ -73,16 +73,16 @@ export function mapMetadata, EntryName exte return Z .ls(palletMetadata, entryName) .next(([palletMetadata, entryName]) => { - const entryMetadata = M.getEntry(palletMetadata, entryName); - if (entryMetadata instanceof Error) return entryMetadata; + const entryMetadata = M.getEntry(palletMetadata, entryName) + if (entryMetadata instanceof Error) return entryMetadata if (entryMetadata.type !== "Map") { - return new ExpectedMapError(); + return new ExpectedMapError() } - return entryMetadata; + return entryMetadata }, k4_) - .zoned("MapMetadata"); + .zoned("MapMetadata") } export class ExpectedMapError extends Error { - 
override readonly name = "ExpectedMapError"; + override readonly name = "ExpectedMapError" } diff --git a/effects/mod.ts b/effects/mod.ts index e9c5526ac..8cfd4c3e5 100644 --- a/effects/mod.ts +++ b/effects/mod.ts @@ -1,13 +1,13 @@ -export * from "./blockWatch.ts"; -export * from "./const.ts"; -export * from "./entryRead.ts"; -export * from "./entryWatch.ts"; -export * from "./events.ts"; -export * from "./extrinsic.ts"; -export * from "./keyPageRead.ts"; -export * from "./metadata.ts"; -export * from "./rpc.ts"; -export * as knownClients from "./rpc_known_clients.ts"; -export * from "./rpc_known_clients.ts"; -export * from "./rpc_known_methods.ts"; -export * from "./scale.ts"; +export * from "./blockWatch.ts" +export * from "./const.ts" +export * from "./entryRead.ts" +export * from "./entryWatch.ts" +export * from "./events.ts" +export * from "./extrinsic.ts" +export * from "./keyPageRead.ts" +export * from "./metadata.ts" +export * from "./rpc.ts" +export * as knownClients from "./rpc_known_clients.ts" +export * from "./rpc_known_clients.ts" +export * from "./rpc_known_methods.ts" +export * from "./scale.ts" diff --git a/effects/rpc.ts b/effects/rpc.ts index 98b56b61c..f8e0f4054 100644 --- a/effects/rpc.ts +++ b/effects/rpc.ts @@ -1,10 +1,10 @@ -import * as Z from "../deps/zones.ts"; -import * as rpc from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; +import * as Z from "../deps/zones.ts" +import * as rpc from "../rpc/mod.ts" +import * as U from "../util/mod.ts" -const k0_ = Symbol(); -const k1_ = Symbol(); -const k2_ = Symbol(); +const k0_ = Symbol() +const k1_ = Symbol() +const k2_ = Symbol() export function rpcClient< DiscoveryValue, @@ -19,9 +19,9 @@ export function rpcClient< return Z .ls(provider, discoveryValue) .next(([_, discoveryValue]) => { - return new rpc.Client(provider, discoveryValue as DiscoveryValue); + return new rpc.Client(provider, discoveryValue as DiscoveryValue) }, k0_) - .zoned("RpcClient"); + .zoned("RpcClient") } export function rpcCall(method: string, nonIdempotent?: boolean) { @@ -30,28 +30,28 @@ export function rpcCall(method: string, nonIde return Z .rc(client, method, nonIdempotent, ...params) .next(async ([[client, method, _2, ...params], counter]) => { - type ClientE = typeof client[rpc.ClientE_]; + type ClientE = typeof client[rpc.ClientE_] // TODO: why do we need to explicitly type this / why is this not being inferred? - const id = client.providerRef.nextId(); + const id = client.providerRef.nextId() const result: rpc.ClientCallEvent = await client.call({ jsonrpc: "2.0", id, method, params, - }); - const discardCheckResult = await discardCheck(client, counter); - if (discardCheckResult) return discardCheckResult; + }) + const discardCheckResult = await discardCheck(client, counter) + if (discardCheckResult) return discardCheckResult if (result instanceof Error) { - return result; + return result } else if (result.error) { - return new RpcServerError(result); + return new RpcServerError(result) } - return result.result; + return result.result }, k1_) - .zoned("RpcCall"); - }; - }; + .zoned("RpcCall") + } + } } // TODO: why are leading type params unknown when `extends Z.$Client`? 
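To make the curried shape of `rpcCall` above concrete, here is a minimal usage sketch in the patch's semicolon-free style. It mirrors the binding shown in `examples/rpc_call.ts` further down in this diff and assumes the repository's `C.polkadot` client and `U.throwIfError` helper.

```ts
// Import paths as written in examples/rpc_call.ts within this repository.
import * as C from "../mod.ts"
import * as U from "../util/mod.ts"

// `rpcCall` curries the method name, then the client, then the call params,
// yielding an effect whose `run()` resolves to either the result or an error value.
const root = C.rpcCall<[], string[]>("rpc_methods")(C.polkadot)()

console.log(U.throwIfError(await root.run()))
```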
@@ -65,13 +65,13 @@ export function rpcSubscription() { return Z .rc(client, listener, ...params) .next(async ([[client, listener, ...params], counter]) => { - type ClientE = typeof client[rpc.ClientE_]; - const id = client.providerRef.nextId(); + type ClientE = typeof client[rpc.ClientE_] + const id = client.providerRef.nextId() let error: | undefined | RpcServerError | rpc.ProviderSendError - | rpc.ProviderHandlerError; + | rpc.ProviderHandlerError const subscriptionId = await client.subscribe({ jsonrpc: "2.0", id, @@ -79,41 +79,41 @@ export function rpcSubscription() { params, }, function(e) { if (e instanceof Error) { - error = e; - this.stop(); + error = e + this.stop() } else if (e.error) { - error = new RpcServerError(e); - console.log(e); - this.stop(); + error = new RpcServerError(e) + console.log(e) + this.stop() } else { // TODO: halt if returns `Error` | `Promise`? - listener.apply(this, [e.params.result]); + listener.apply(this, [e.params.result]) } - }); - const discardCheckResult = await discardCheck(client, counter); - return discardCheckResult || error || subscriptionId!; + }) + const discardCheckResult = await discardCheck(client, counter) + return discardCheckResult || error || subscriptionId! }, k2_) - .zoned("RpcSubscription"); - }; - }; - }; + .zoned("RpcSubscription") + } + } + } } async function discardCheck( client: rpc.Client, counter: Z.RcCounter, ) { - counter.i--; + counter.i-- if (!counter.i) { - return await client.discard(); + return await client.discard() } - return; + return } export class RpcServerError extends Error { - override readonly name = "RpcServer"; + override readonly name = "RpcServer" constructor(readonly inner: rpc.msg.ErrorMessage) { - super(); + super() } } diff --git a/effects/rpc_known_clients.ts b/effects/rpc_known_clients.ts index f6ae806f5..166565742 100644 --- a/effects/rpc_known_clients.ts +++ b/effects/rpc_known_clients.ts @@ -1,16 +1,16 @@ -import * as rpc from "../rpc/mod.ts"; -import { rpcClient } from "./rpc.ts"; +import * as rpc from "../rpc/mod.ts" +import { rpcClient } from "./rpc.ts" // TODO: do we care to defer effect initialization (not very costly) function proxyClient(url: string) { - return rpcClient(rpc.proxyProvider, url); + return rpcClient(rpc.proxyProvider, url) } -export const polkadot = proxyClient("wss://rpc.polkadot.io"); -export const kusama = proxyClient("wss://kusama-rpc.polkadot.io"); -export const acala = proxyClient("wss://acala-polkadot.api.onfinality.io/public-ws"); -export const rococo = proxyClient("wss://rococo-contracts-rpc.polkadot.io"); -export const moonbeam = proxyClient("wss://wss.api.moonbeam.network"); -export const statemint = proxyClient("wss://statemint-rpc.polkadot.io"); -export const subsocial = proxyClient("wss://para.subsocial.network"); -export const westend = proxyClient("wss://westend-rpc.polkadot.io"); +export const polkadot = proxyClient("wss://rpc.polkadot.io") +export const kusama = proxyClient("wss://kusama-rpc.polkadot.io") +export const acala = proxyClient("wss://acala-polkadot.api.onfinality.io/public-ws") +export const rococo = proxyClient("wss://rococo-contracts-rpc.polkadot.io") +export const moonbeam = proxyClient("wss://wss.api.moonbeam.network") +export const statemint = proxyClient("wss://statemint-rpc.polkadot.io") +export const subsocial = proxyClient("wss://para.subsocial.network") +export const westend = proxyClient("wss://westend-rpc.polkadot.io") diff --git a/effects/rpc_known_methods.ts b/effects/rpc_known_methods.ts index 1cd47207e..7a8dec91e 100644 --- 
a/effects/rpc_known_methods.ts +++ b/effects/rpc_known_methods.ts @@ -1,56 +1,56 @@ -import { known } from "../rpc/mod.ts"; -import * as U from "../util/mod.ts"; -import { rpcCall, rpcSubscription } from "./rpc.ts"; +import { known } from "../rpc/mod.ts" +import * as U from "../util/mod.ts" +import { rpcCall, rpcSubscription } from "./rpc.ts" // TODO: generate the following? export namespace state { - export const getMetadata = rpcCall<[at?: U.HexHash], U.HexHash>("state_getMetadata"); + export const getMetadata = rpcCall<[at?: U.HexHash], U.HexHash>("state_getMetadata") export const getStorage = rpcCall< [key: known.StorageKey, at?: U.HexHash], known.StorageData - >("state_getStorage", true); + >("state_getStorage", true) export const subscribeStorage = rpcSubscription< [keys: known.StorageKey[]], known.StorageChangeSet >()( "state_subscribeStorage", - ); + ) export const unsubscribeStorage = rpcCall<[subscriptionId: string], true>( "state_unsubscribeStorage", - ); + ) export const getKeysPaged = rpcCall< [prefix: known.StorageKey, count: number, startKey?: known.StorageKey, at?: U.HexHash], known.StorageKey[] - >("state_getKeysPaged"); + >("state_getKeysPaged") } export namespace chain { - export const subscribeNewHeads = rpcSubscription<[], known.Header>()("chain_subscribeNewHeads"); + export const subscribeNewHeads = rpcSubscription<[], known.Header>()("chain_subscribeNewHeads") export const unsubscribeNewHeads = rpcCall<[subscriptionId: string], true>( "chain_unsubscribeNewHeads", - ); - export const getBlock = rpcCall<[hash?: U.HexHash], known.SignedBlock>("chain_getBlock"); + ) + export const getBlock = rpcCall<[hash?: U.HexHash], known.SignedBlock>("chain_getBlock") export const getBlockHash = rpcCall<[height?: known.ListOrValue], U.HexHash>( "chain_getBlockHash", - ); + ) } export namespace system { export const accountNextIndex = rpcCall<[accountId: known.AccountId], number>( "system_accountNextIndex", - ); + ) } export namespace author { export const submitAndWatchExtrinsic = rpcSubscription< [extrinsic: U.Hex], known.TransactionStatus - >()("author_submitAndWatchExtrinsic"); + >()("author_submitAndWatchExtrinsic") export const unwatchExtrinsic = rpcCall<[subscriptionId: string], true>( "author_unwatchExtrinsic", - ); - export const submitExtrinsic = rpcCall<[extrinsic: U.Hex], U.Hash>("author_submitExtrinsic"); + ) + export const submitExtrinsic = rpcCall<[extrinsic: U.Hex], U.Hash>("author_submitExtrinsic") } -export const methods = rpcCall<[], string[]>("rpc_methods"); +export const methods = rpcCall<[], string[]>("rpc_methods") export namespace payment { export const queryInfo = rpcCall<[extrinsic: U.Hex, at?: U.HexHash], known.RuntimeDispatchInfo>( "payment_queryInfo", - ); + ) } diff --git a/effects/scale.ts b/effects/scale.ts index 2954a7493..b4a6e7623 100644 --- a/effects/scale.ts +++ b/effects/scale.ts @@ -1,24 +1,24 @@ -import * as $ from "../deps/scale.ts"; -import * as Z from "../deps/zones.ts"; -import * as M from "../frame_metadata/mod.ts"; +import * as $ from "../deps/scale.ts" +import * as Z from "../deps/zones.ts" +import * as M from "../frame_metadata/mod.ts" -const k0_ = Symbol(); -const k1_ = Symbol(); -const k2_ = Symbol(); -const k3_ = Symbol(); -const k4_ = Symbol(); -const k5_ = Symbol(); +const k0_ = Symbol() +const k1_ = Symbol() +const k2_ = Symbol() +const k3_ = Symbol() +const k4_ = Symbol() +const k5_ = Symbol() export const deriveCodec = Z.call.fac((metadata: M.Metadata) => { - return M.DeriveCodec(metadata.tys); -}, k0_); + return 
M.DeriveCodec(metadata.tys) +}, k0_) export const codec = Z.call.fac(( deriveCodec: M.DeriveCodec, ty: number | M.Ty, ) => { - return deriveCodec(ty); -}, k1_); + return deriveCodec(ty) +}, k1_) export function scaleDecoded< Codec extends Z.$<$.Codec>, @@ -32,9 +32,9 @@ export function scaleDecoded< return Z .ls(codec, encoded, key) .next(([codec, encoded, key]): Record, any> => { - return { [key]: codec.decode(encoded) } as any; + return { [key]: codec.decode(encoded) } as any }, k2_) - .zoned("ScaleDecoded"); + .zoned("ScaleDecoded") } // TODO: eventually, utilize `V` to toggle runtime validation @@ -47,12 +47,12 @@ export function scaleEncoded>, Decoded>( .ls(codec, decoded, isAsync) .next(([codec, decoded]) => { try { - $.assert(codec, decoded); + $.assert(codec, decoded) } catch (e) { - return e as $.ScaleAssertError; + return e as $.ScaleAssertError } - return codec[isAsync ? "encodeAsync" : "encode"](decoded); - }, k3_); + return codec[isAsync ? "encodeAsync" : "encode"](decoded) + }, k3_) } export const $extrinsic = Z.call.fac(( @@ -66,8 +66,8 @@ export const $extrinsic = Z.call.fac(( metadata, sign: sign!, prefix: prefix!, - }); -}, k4_); + }) +}, k4_) export const $storageKey = Z.call.fac(( deriveCodec: M.DeriveCodec, @@ -78,5 +78,5 @@ export const $storageKey = Z.call.fac(( deriveCodec, pallet, storageEntry, - }); -}, k5_); + }) +}, k5_) diff --git a/examples/all.ts b/examples/all.ts index 4b89e029e..5995dce1f 100644 --- a/examples/all.ts +++ b/examples/all.ts @@ -1,9 +1,9 @@ // This script runs all examples in sequence. We should ultimately delete this script... // ... but it's currently proving useful for local debugging. -const ignore = ["all.ts", (await Deno.readTextFile("examples/.ignore")).split("\n")]; +const ignore = ["all.ts", (await Deno.readTextFile("examples/.ignore")).split("\n")] for await (const item of Deno.readDir("examples")) { if (item.isFile && item.name.endsWith(".ts") && !ignore.includes(item.name)) { - await import(`./${item.name}`); + await import(`./${item.name}`) } } diff --git a/examples/balance.ts b/examples/balance.ts index 49034391c..2227f9e9b 100644 --- a/examples/balance.ts +++ b/examples/balance.ts @@ -1,7 +1,7 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" -const root = C.entryRead(T.polkadot)("System", "Account", [T.alice.publicKey]); +const root = C.entryRead(T.polkadot)("System", "Account", [T.alice.publicKey]) -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/batch.ts b/examples/batch.ts index 324a97d27..54d0a8617 100644 --- a/examples/batch.ts +++ b/examples/batch.ts @@ -1,6 +1,6 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" // TODO: uncomment these lines / use env upon solving `count` in zones // const getBalances = C.Z.ls( @@ -27,10 +27,10 @@ const tx = C.extrinsic(T.westend)({ }) .signed(C.compat.signerFromKeypair(T.alice)) .watch(function(status) { - console.log(status); + console.log(status) if (C.rpc.known.TransactionStatus.isTerminal(status)) { - this.stop(); + this.stop() } - }); + }) -U.throwIfError(await tx.run()); +U.throwIfError(await tx.run()) diff --git a/examples/derived.ts b/examples/derived.ts index 
348767d62..2906f389c 100644 --- a/examples/derived.ts +++ b/examples/derived.ts @@ -1,12 +1,12 @@ -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as U from "../util/mod.ts" const ids = C.entryRead(C.polkadot)("Paras", "Parachains", []) .access("value") - .as(); + .as() const root = C.Z.each(ids, (id) => { - return C.entryRead(C.polkadot)("Paras", "Heads", [id]); -}); + return C.entryRead(C.polkadot)("Paras", "Heads", [id]) +}) -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/first_ten_keys.ts b/examples/first_ten_keys.ts index ae50a381c..2cc0ab0b1 100644 --- a/examples/first_ten_keys.ts +++ b/examples/first_ten_keys.ts @@ -1,7 +1,7 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" -const root = C.keyPageRead(T.polkadot)("System", "Account", 10, []); +const root = C.keyPageRead(T.polkadot)("System", "Account", 10, []) -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/metadata.ts b/examples/metadata.ts index 1112a1c56..8364a9744 100644 --- a/examples/metadata.ts +++ b/examples/metadata.ts @@ -1,7 +1,7 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" -const root = C.metadata(T.polkadot)(); +const root = C.metadata(T.polkadot)() -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/multisig_transfer.ts b/examples/multisig_transfer.ts index 4d75fc1c8..c324ab6c4 100644 --- a/examples/multisig_transfer.ts +++ b/examples/multisig_transfer.ts @@ -1,25 +1,25 @@ -import { KeyringPair } from "../deps/polkadot/keyring/types.ts"; -import { createKeyMulti } from "../deps/polkadot/util-crypto.ts"; -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import { KeyringPair } from "../deps/polkadot/keyring/types.ts" +import { createKeyMulti } from "../deps/polkadot/util-crypto.ts" +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" // FIXME: remove this check once the Zones .bind(env) fix is merged -const hostname = Deno.env.get("TEST_CTX_HOSTNAME"); -const portRaw = Deno.env.get("TEST_CTX_PORT"); +const hostname = Deno.env.get("TEST_CTX_HOSTNAME") +const portRaw = Deno.env.get("TEST_CTX_PORT") if (!hostname || !portRaw) { - throw new Error("Must be running inside a test ctx"); + throw new Error("Must be running inside a test ctx") } -const entryRead = C.entryRead(T.polkadot); -const extrinsic = C.extrinsic(T.polkadot); +const entryRead = C.entryRead(T.polkadot) +const extrinsic = C.extrinsic(T.polkadot) const signatories = T.users .slice(0, 3) .map(({ publicKey }) => publicKey) - .sort(); -const THRESHOLD = 2; -const multisigPublicKey = createKeyMulti(signatories, THRESHOLD); + .sort() +const THRESHOLD = 2 +const multisigPublicKey = createKeyMulti(signatories, THRESHOLD) // Transfer initial balance (existential deposit) to multisig address const existentialDeposit = extrinsic({ @@ -33,42 +33,42 @@ const existentialDeposit = extrinsic({ }) 
.signed(C.compat.signerFromKeypair(T.alice)) .watch(function(status) { - console.log(`Existential deposit:`, status); + console.log(`Existential deposit:`, status) if (C.rpc.known.TransactionStatus.isTerminal(status)) { - this.stop(); + this.stop() } - }); + }) // First approval root -const proposal = createOrApproveMultisigProposal("Proposal", T.alice); +const proposal = createOrApproveMultisigProposal("Proposal", T.alice) // Get the key of the timepoint const key = C.keyPageRead(T.polkadot)("Multisig", "Multisigs", 1, [multisigPublicKey]) .access(0) - .access(1); + .access(1) // Get the timepoint itself const maybeTimepoint = entryRead("Multisig", "Multisigs", [multisigPublicKey, key]) .access("value") - .access("when"); + .access("when") -const approval = createOrApproveMultisigProposal("Approval", T.bob, maybeTimepoint); +const approval = createOrApproveMultisigProposal("Approval", T.bob, maybeTimepoint) // check T.dave new balance -const daveBalance = entryRead("System", "Account", [T.dave.publicKey]); +const daveBalance = entryRead("System", "Account", [T.dave.publicKey]) // TODO: use common env -U.throwIfError(await existentialDeposit.run()); -U.throwIfError(await proposal.run()); -U.throwIfError(await approval.run()); -console.log(U.throwIfError(await daveBalance.run())); +U.throwIfError(await existentialDeposit.run()) +U.throwIfError(await proposal.run()) +U.throwIfError(await approval.run()) +console.log(U.throwIfError(await daveBalance.run())) // FIXME: weight calculation (`payment.queryInfo(extrinsic, atBlockHash)`) function createOrApproveMultisigProposal< Rest extends [ MaybeTimepoint?: C.Z.$<{ - height: number; - index: number; + height: number + index: number }>, ], >( @@ -101,9 +101,9 @@ function createOrApproveMultisigProposal< }) .signed(C.compat.signerFromKeypair(pair)) .watch(function(status) { - console.log(`${label}:`, status); + console.log(`${label}:`, status) if (C.rpc.known.TransactionStatus.isTerminal(status)) { - this.stop(); + this.stop() } - }); + }) } diff --git a/examples/polkadot_js_signer.ts b/examples/polkadot_js_signer.ts index d9faf2b93..fc6d41903 100644 --- a/examples/polkadot_js_signer.ts +++ b/examples/polkadot_js_signer.ts @@ -1,7 +1,7 @@ -import { TypeRegistry } from "../deps/polkadot/types.ts"; -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import { TypeRegistry } from "../deps/polkadot/types.ts" +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" const root = C.extrinsic(T.westend)({ sender: C.compat.multiAddressFromKeypair(T.alice), @@ -14,20 +14,20 @@ const root = C.extrinsic(T.westend)({ }) .signed({ signPayload(payload) { - const tr = new TypeRegistry(); - tr.setSignedExtensions(payload.signedExtensions); + const tr = new TypeRegistry() + tr.setSignedExtensions(payload.signedExtensions) return Promise.resolve( tr .createType("ExtrinsicPayload", payload, { version: payload.version }) .sign(T.alice), - ); + ) }, }) .watch(function(status) { - console.log(status); + console.log(status) if (C.rpc.known.TransactionStatus.isTerminal(status)) { - this.stop(); + this.stop() } - }); + }) -U.throwIfError(await root.run()); +U.throwIfError(await root.run()) diff --git a/examples/raw_rpc_client_call.ts b/examples/raw_rpc_client_call.ts index f7538058e..08b0cfdef 100755 --- a/examples/raw_rpc_client_call.ts +++ b/examples/raw_rpc_client_call.ts @@ -1,6 +1,6 @@ -import * as T from "../test_util/mod.ts"; +import * as T from 
"../test_util/mod.ts" -const client = await T.westend.client; +const client = await T.westend.client console.log( await client.call({ @@ -9,6 +9,6 @@ console.log( method: "state_getMetadata", params: [], }), -); +) -await client.discard(); +await client.discard() diff --git a/examples/raw_rpc_client_subscription.ts b/examples/raw_rpc_client_subscription.ts index 72e065756..1d9ad3b17 100755 --- a/examples/raw_rpc_client_subscription.ts +++ b/examples/raw_rpc_client_subscription.ts @@ -1,8 +1,8 @@ -import { assertNotInstanceOf } from "../deps/std/testing/asserts.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import { assertNotInstanceOf } from "../deps/std/testing/asserts.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" -const client = await T.polkadot.client; +const client = await T.polkadot.client const subscriptionId = await client.subscribe({ jsonrpc: "2.0", @@ -10,14 +10,14 @@ const subscriptionId = await client.subscribe({ method: "chain_subscribeAllHeads", params: [], }, function(e) { - assertNotInstanceOf(e, Error); - console.log(e); - const counter = this.state(U.Counter); + assertNotInstanceOf(e, Error) + console.log(e) + const counter = this.state(U.Counter) if (counter.i === 2) { - return this.stop(); + return this.stop() } - counter.inc(); -}); + counter.inc() +}) const { result } = U.throwIfError( await client.call({ @@ -26,11 +26,11 @@ const { result } = U.throwIfError( method: "chain_unsubscribeAllHeads", params: [subscriptionId], }), -); +) console.log( // cspell:disable-next-line `${result ? "S" : "Uns"}uccessfully unsubscribed from subscription ${subscriptionId}`, -); +) -await client.discard(); +await client.discard() diff --git a/examples/read_block.ts b/examples/read_block.ts index 62f50c9bd..c2d3c0e08 100644 --- a/examples/read_block.ts +++ b/examples/read_block.ts @@ -1,10 +1,10 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" const extrinsicsRaw = C.chain.getBlock(C.polkadot)() .access("block") - .access("extrinsics"); -const root = C.extrinsicsDecoded(T.polkadot, extrinsicsRaw); + .access("extrinsics") +const root = C.extrinsicsDecoded(T.polkadot, extrinsicsRaw) -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/read_bonded.ts b/examples/read_bonded.ts index a9247e32c..501a5f316 100644 --- a/examples/read_bonded.ts +++ b/examples/read_bonded.ts @@ -1,9 +1,9 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" -const aliceStash = T.alice.derive("//stash"); +const aliceStash = T.alice.derive("//stash") -const aliceBonded = C.entryRead(T.polkadot)("Staking", "Bonded", [aliceStash.publicKey]); +const aliceBonded = C.entryRead(T.polkadot)("Staking", "Bonded", [aliceStash.publicKey]) -console.log(U.throwIfError(await aliceBonded.run())); +console.log(U.throwIfError(await aliceBonded.run())) diff --git a/examples/read_era_rewards.ts b/examples/read_era_rewards.ts index c0e4f6bcb..ab9abb246 100644 --- a/examples/read_era_rewards.ts +++ b/examples/read_era_rewards.ts @@ -1,10 +1,10 @@ -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" 
+import * as U from "../util/mod.ts" const idx = C.entryRead(C.westend)("Staking", "ActiveEra", []) .access("value") - .access("index"); + .access("index") -const eraRewardPoints = C.entryRead(C.westend)("Staking", "ErasRewardPoints", [idx]); +const eraRewardPoints = C.entryRead(C.westend)("Staking", "ErasRewardPoints", [idx]) -console.log(U.throwIfError(await eraRewardPoints.run())); +console.log(U.throwIfError(await eraRewardPoints.run())) diff --git a/examples/read_events.ts b/examples/read_events.ts index eade8dc16..e3de7e30c 100644 --- a/examples/read_events.ts +++ b/examples/read_events.ts @@ -1,6 +1,6 @@ -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as U from "../util/mod.ts" -const root = C.entryRead(C.polkadot)("System", "Events", []); +const root = C.entryRead(C.polkadot)("System", "Events", []) -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/rpc_call.ts b/examples/rpc_call.ts index b6b49633b..17eac9a23 100644 --- a/examples/rpc_call.ts +++ b/examples/rpc_call.ts @@ -1,6 +1,6 @@ -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as U from "../util/mod.ts" -const root = C.rpcCall<[], string[]>("rpc_methods")(C.polkadot)(); +const root = C.rpcCall<[], string[]>("rpc_methods")(C.polkadot)() -console.log(U.throwIfError(await root.run())); +console.log(U.throwIfError(await root.run())) diff --git a/examples/rpc_subscription.ts b/examples/rpc_subscription.ts index 662df974b..08ef459da 100644 --- a/examples/rpc_subscription.ts +++ b/examples/rpc_subscription.ts @@ -1,15 +1,15 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" const root = C.chain.unsubscribeNewHeads(T.polkadot)( C.chain.subscribeNewHeads(T.polkadot)([], function(header) { - console.log(header); - const counter = this.state(U.Counter); + console.log(header) + const counter = this.state(U.Counter) if (counter.i === 2) { - return this.stop(); + return this.stop() } - counter.inc(); + counter.inc() }), -); -U.throwIfError(await root.run()); +) +U.throwIfError(await root.run()) diff --git a/examples/ticker.ts b/examples/ticker.ts index 597a609cf..53b745a6e 100644 --- a/examples/ticker.ts +++ b/examples/ticker.ts @@ -1,14 +1,14 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" const root = C.entryWatch(T.polkadot)("Timestamp", "Now", [], function(entry) { - console.log(entry); - const counter = this.state(U.Counter); + console.log(entry) + const counter = this.state(U.Counter) if (counter.i === 2) { - return this.stop(); + return this.stop() } - counter.inc(); -}); + counter.inc() +}) -U.throwIfError(await root.run()); +U.throwIfError(await root.run()) diff --git a/examples/transfer.ts b/examples/transfer.ts index 592702bbf..e95a08bd6 100644 --- a/examples/transfer.ts +++ b/examples/transfer.ts @@ -1,10 +1,10 @@ -import * as C from "../mod.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" -let hash: undefined | C.rpc.known.Hash; +let hash: 
undefined | C.rpc.known.Hash -const env = C.Z.env(); +const env = C.Z.env() const tx = C.extrinsic(T.westend)({ sender: C.compat.multiAddressFromKeypair(T.alice), @@ -15,22 +15,22 @@ const tx = C.extrinsic(T.westend)({ dest: C.compat.multiAddressFromKeypair(T.bob), }, }) - .signed(C.compat.signerFromKeypair(T.alice)); + .signed(C.compat.signerFromKeypair(T.alice)) const runTx = tx .watch(function(status) { - console.log(status); + console.log(status) if (C.rpc.known.TransactionStatus.isTerminal(status)) { // TODO: return this upon implementing `this.stop` - hash = (status as { finalized: C.rpc.known.Hash }).finalized; - this.stop(); + hash = (status as { finalized: C.rpc.known.Hash }).finalized + this.stop() } }) - .bind(env); + .bind(env) const readEvents = C .events(tx, C.Z.call(() => hash!)) - .bind(env); + .bind(env) -U.throwIfError(await runTx()); -console.log(U.throwIfError(await readEvents())); +U.throwIfError(await runTx()) +console.log(U.throwIfError(await readEvents())) diff --git a/examples/watch_blocks.ts b/examples/watch_blocks.ts index c3e368ec4..c3246517d 100644 --- a/examples/watch_blocks.ts +++ b/examples/watch_blocks.ts @@ -1,16 +1,16 @@ -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as U from "../util/mod.ts" const root = C.blockWatch(C.polkadot)(async function blockWatchListener({ block }) { const extrinsicsDecoded = C .extrinsicsDecoded(C.polkadot, block.extrinsics) - .bind(this.env); - console.log(await extrinsicsDecoded()); - const counter = this.state(U.Counter); + .bind(this.env) + console.log(await extrinsicsDecoded()) + const counter = this.state(U.Counter) if (counter.i === 2) { - return this.stop(); + return this.stop() } - counter.inc(); -}); + counter.inc() +}) -U.throwIfError(await root.run()); +U.throwIfError(await root.run()) diff --git a/examples/watch_events.ts b/examples/watch_events.ts index 920afa41d..dc5b402eb 100644 --- a/examples/watch_events.ts +++ b/examples/watch_events.ts @@ -1,13 +1,13 @@ -import * as C from "../mod.ts"; -import * as U from "../util/mod.ts"; +import * as C from "../mod.ts" +import * as U from "../util/mod.ts" const root = C.entryWatch(C.rococo)("System", "Events", [], function(entry) { - console.log(entry); - const counter = this.state(U.Counter); + console.log(entry) + const counter = this.state(U.Counter) if (counter.i === 2) { - return this.stop(); + return this.stop() } - counter.inc(); -}); + counter.inc() +}) -U.throwIfError(await root.run()); +U.throwIfError(await root.run()) diff --git a/frame_metadata/Codec.test.ts b/frame_metadata/Codec.test.ts index 396ef7635..a154dfe80 100644 --- a/frame_metadata/Codec.test.ts +++ b/frame_metadata/Codec.test.ts @@ -1,29 +1,29 @@ -import { assertEquals } from "../deps/std/testing/asserts.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; -import { ChainError } from "./Codec.ts"; -import { ContractMetadata } from "./Contract.ts"; -import { getPalletAndEntry } from "./Metadata.ts"; -import { setup } from "./test-common.ts"; +import { assertEquals } from "../deps/std/testing/asserts.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" +import { ChainError } from "./Codec.ts" +import { ContractMetadata } from "./Contract.ts" +import { getPalletAndEntry } from "./Metadata.ts" +import { setup } from "./test-common.ts" Deno.test("Derive all", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); + const [metadata, deriveCodec] = await 
setup("polkadot") for (const ty of metadata.tys) { - deriveCodec(ty.id); + deriveCodec(ty.id) } -}); +}) Deno.test("Derive AccountId32 Codec", async () => { - const [_, deriveCodec] = await setup("polkadot"); - const codec = deriveCodec(0); - const encoded = codec.encode(T.alice.publicKey); - assertEquals(encoded, T.alice.publicKey); - assertEquals(codec.decode(encoded), T.alice.publicKey); -}); + const [_, deriveCodec] = await setup("polkadot") + const codec = deriveCodec(0) + const encoded = codec.encode(T.alice.publicKey) + assertEquals(encoded, T.alice.publicKey) + assertEquals(codec.decode(encoded), T.alice.publicKey) +}) Deno.test("Derive AccountInfo Codec", async () => { - const [_, deriveCodec] = await setup("polkadot"); - const codec = deriveCodec(3); + const [_, deriveCodec] = await setup("polkadot") + const codec = deriveCodec(3) const decoded = { nonce: 4, consumers: 1, @@ -35,72 +35,72 @@ Deno.test("Derive AccountInfo Codec", async () => { misc_frozen: 50000000000n, fee_frozen: 0n, }, - }; - const encoded = codec.encode(decoded); - assertEquals(codec.decode(encoded), decoded); -}); + } + const encoded = codec.encode(decoded) + assertEquals(codec.decode(encoded), decoded) +}) Deno.test("Derive Auctions AuctionInfo Storage Entry Codec", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); + const [metadata, deriveCodec] = await setup("polkadot") const auctionInfoStorageEntry = - U.throwIfError(getPalletAndEntry(metadata, "Auctions", "AuctionInfo"))[1]; - const codec = deriveCodec(auctionInfoStorageEntry.value); - const decoded = [8, 9945400]; - const encoded = codec.encode(decoded); - assertEquals(codec.decode(encoded), decoded); -}); + U.throwIfError(getPalletAndEntry(metadata, "Auctions", "AuctionInfo"))[1] + const codec = deriveCodec(auctionInfoStorageEntry.value) + const decoded = [8, 9945400] + const encoded = codec.encode(decoded) + assertEquals(codec.decode(encoded), decoded) +}) Deno.test("Derive Auction Winning Storage Entry Codec", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); + const [metadata, deriveCodec] = await setup("polkadot") const auctionWinningStorageEntry = - U.throwIfError(getPalletAndEntry(metadata, "Auctions", "Winning"))[1]; - const codec = deriveCodec(auctionWinningStorageEntry.value); + U.throwIfError(getPalletAndEntry(metadata, "Auctions", "Winning"))[1] + const codec = deriveCodec(auctionWinningStorageEntry.value) const decoded = [ ...Array(7).fill(undefined), [T.alice.publicKey, 2013, 8672334557167609n], ...Array(28).fill(undefined), - ]; - const encoded = codec.encode(decoded); - assertEquals(codec.decode(encoded), decoded); -}); + ] + const encoded = codec.encode(decoded) + assertEquals(codec.decode(encoded), decoded) +}) Deno.test("Westend circular", async () => { - const [_, deriveCodec] = await setup("westend"); + const [_, deriveCodec] = await setup("westend") // TODO: safeguard against runtime upgrade resulting in future testing of the wrong type - deriveCodec(283); -}); + deriveCodec(283) +}) Deno.test("Derive pallet_xcm::pallet::Error codec", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); - const ty = metadata.tys.find((x) => x.path.join("::") === "pallet_xcm::pallet::Error")!; - const codec = deriveCodec(ty.id); - const encoded = codec.encode("Unreachable"); - assertEquals(encoded, new Uint8Array([0])); - assertEquals(codec.decode(encoded), "Unreachable"); -}); + const [metadata, deriveCodec] = await setup("polkadot") + const ty = metadata.tys.find((x) => 
x.path.join("::") === "pallet_xcm::pallet::Error")! + const codec = deriveCodec(ty.id) + const encoded = codec.encode("Unreachable") + assertEquals(encoded, new Uint8Array([0])) + assertEquals(codec.decode(encoded), "Unreachable") +}) Deno.test("Derive Result codec", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); + const [metadata, deriveCodec] = await setup("polkadot") const ty = metadata.tys.find((x) => x.path[0] === "Result" && x.params[1]!.ty!.path.join("::") === "sp_runtime::DispatchError" - )!; - const codec = deriveCodec(ty.id); - const ok = null; - const okEncoded = codec.encode(ok); - assertEquals(okEncoded, new Uint8Array([0])); - assertEquals(codec.decode(okEncoded), ok); - const err = new ChainError({ type: "Other" }); - const errEncoded = codec.encode(err); - assertEquals(errEncoded, new Uint8Array([1, 0])); - assertEquals(codec.decode(errEncoded), err); -}); + )! + const codec = deriveCodec(ty.id) + const ok = null + const okEncoded = codec.encode(ok) + assertEquals(okEncoded, new Uint8Array([0])) + assertEquals(codec.decode(okEncoded), ok) + const err = new ChainError({ type: "Other" }) + const errEncoded = codec.encode(err) + assertEquals(errEncoded, new Uint8Array([1, 0])) + assertEquals(codec.decode(errEncoded), err) +}) Deno.test("Smart Contract codecs", async () => { - const [_, deriveCodec] = await setup("polkadot"); - const raw = await Deno.readTextFile("frame_metadata/raw_erc20_metadata.json"); - const normalized = ContractMetadata.normalize(JSON.parse(raw)); + const [_, deriveCodec] = await setup("polkadot") + const raw = await Deno.readTextFile("frame_metadata/raw_erc20_metadata.json") + const normalized = ContractMetadata.normalize(JSON.parse(raw)) for (const ty of normalized.V3.types) { - deriveCodec(ty.id); + deriveCodec(ty.id) } -}); +}) diff --git a/frame_metadata/Codec.ts b/frame_metadata/Codec.ts index e0d31af2a..72c99e266 100644 --- a/frame_metadata/Codec.ts +++ b/frame_metadata/Codec.ts @@ -1,120 +1,120 @@ -import * as $ from "../deps/scale.ts"; -import { $era } from "./Era.ts"; -import type * as M from "./mod.ts"; -import { TyVisitor } from "./TyVisitor.ts"; +import * as $ from "../deps/scale.ts" +import { $era } from "./Era.ts" +import type * as M from "./mod.ts" +import { TyVisitor } from "./TyVisitor.ts" -export type DeriveCodec = (typeI: number | M.Ty) => $.Codec; +export type DeriveCodec = (typeI: number | M.Ty) => $.Codec /** * All derived codecs for ZSTs will use this exact codec, * so `derivedCodec === $null` is true iff the type is a ZST. 
*/ -export const $null = $.constant(null); +export const $null = $.constant(null) // TODO: tuple/array element skip optimization export function DeriveCodec(tys: M.Ty[]): DeriveCodec { const visitor = new TyVisitor<$.Codec>(tys, { unitStruct() { - return $null; + return $null }, wrapperStruct(_ty, inner) { - return this.visit(inner); + return this.visit(inner) }, tupleStruct(_ty, members) { - return $.tuple(...members.map((x) => this.visit(x))); + return $.tuple(...members.map((x) => this.visit(x))) }, objectStruct(ty) { - return $.object(...ty.fields.map((x): $.AnyField => [x.name!, this.visit(x.ty)])); + return $.object(...ty.fields.map((x): $.AnyField => [x.name!, this.visit(x.ty)])) }, option(_ty, some) { - return $.option(this.visit(some)); + return $.option(this.visit(some)) }, result(_ty, ok, err) { - return $.result(this.visit(ok), $.instance(ChainError, ["value", this.visit(err)])); + return $.result(this.visit(ok), $.instance(ChainError, ["value", this.visit(err)])) }, never() { - return $.never as any; + return $.never as any }, stringUnion(ty) { - const members: Record = {}; + const members: Record = {} for (const { index, name } of ty.members) { - members[index] = name; + members[index] = name } - return $.stringUnion(members); + return $.stringUnion(members) }, taggedUnion(ty) { - const members: Record = {}; + const members: Record = {} for (const { fields, name: type, index } of ty.members) { - let member: $.AnyTaggedUnionMember; + let member: $.AnyTaggedUnionMember if (fields.length === 0) { - member = [type]; + member = [type] } else if (fields[0]!.name === undefined) { // Tuple variant const $value = fields.length === 1 ? this.visit(fields[0]!.ty) - : $.tuple(...fields.map((f) => this.visit(f.ty))); - member = [type, ["value", $value]]; + : $.tuple(...fields.map((f) => this.visit(f.ty))) + member = [type, ["value", $value]] } else { // Object variant const memberFields = fields.map((field, i) => { return [ field.name || i, this.visit(field.ty), - ] as [string, $.Codec]; - }); - member = [type, ...memberFields]; + ] as [string, $.Codec] + }) + member = [type, ...memberFields] } - members[index] = member; + members[index] = member } - return $.taggedUnion("type", members); + return $.taggedUnion("type", members) }, uint8Array() { - return $.uint8Array; + return $.uint8Array }, array(ty) { - return $.array(this.visit(ty.typeParam)); + return $.array(this.visit(ty.typeParam)) }, sizedUint8Array(ty) { - return $.sizedUint8Array(ty.len); + return $.sizedUint8Array(ty.len) }, sizedArray(ty) { - return $.sizedArray(this.visit(ty.typeParam), ty.len); + return $.sizedArray(this.visit(ty.typeParam), ty.len) }, primitive(ty) { - if (ty.kind === "char") return $.str; - return $[ty.kind]; + if (ty.kind === "char") return $.str + return $[ty.kind] }, compact(ty) { - const inner = this.visit(ty.typeParam); - return $.compact(inner); + const inner = this.visit(ty.typeParam) + return $.compact(inner) }, bitSequence() { - return $.bitSequence; + return $.bitSequence }, map(_ty, key, val) { - return $.map(this.visit(key), this.visit(val)); + return $.map(this.visit(key), this.visit(val)) }, set(_ty, val) { - return $.set(this.visit(val)); + return $.set(this.visit(val)) }, era() { - return $era; + return $era }, lenPrefixedWrapper(_ty, inner) { - return $.lenPrefixed(this.visit(inner)); + return $.lenPrefixed(this.visit(inner)) }, circular(ty) { - return $.deferred(() => this.cache[ty.id]!); + return $.deferred(() => this.cache[ty.id]!) 
}, - }); + }) - return (ty) => visitor.visit(ty); + return (ty) => visitor.visit(ty) } export class ChainError extends Error { - override readonly name = "ChainError"; + override readonly name = "ChainError" constructor(readonly value: T) { - super(); + super() } } diff --git a/frame_metadata/Contract.ts b/frame_metadata/Contract.ts index e93174dbc..fa33bbde1 100644 --- a/frame_metadata/Contract.ts +++ b/frame_metadata/Contract.ts @@ -1,85 +1,85 @@ -import { unreachable } from "../deps/std/testing/asserts.ts"; -import { Ty, TyDef, UnionTyDefMember } from "./scale_info.ts"; +import { unreachable } from "../deps/std/testing/asserts.ts" +import { Ty, TyDef, UnionTyDefMember } from "./scale_info.ts" export interface ContractMetadata { - source: ContractMetadata.Source; - contract: ContractMetadata.Contract; - V3: ContractMetadata.Abi; + source: ContractMetadata.Source + contract: ContractMetadata.Contract + V3: ContractMetadata.Abi } export namespace ContractMetadata { // TODO: serde `Value` type - export type Value = unknown; + export type Value = unknown export interface Source { - hash: string; - language: string; - compiler: string; - wasm?: string; + hash: string + language: string + compiler: string + wasm?: string } export interface Contract { - name: string; - version: string; - authors: string[]; - description?: string; - documentation?: string; - repository?: string; - homepage?: string; - license?: string; + name: string + version: string + authors: string[] + description?: string + documentation?: string + repository?: string + homepage?: string + license?: string } export interface User { - json: Record; + json: Record } export interface Abi { - spec: Spec; - storage: Storage; + spec: Spec + storage: Storage // TODO: type the raw serde-defined shape? - types: Ty[]; + types: Ty[] } export interface Spec { - constructors: Constructor[]; - docs: string[]; - events: Event[]; - messages: Message[]; + constructors: Constructor[] + docs: string[] + events: Event[] + messages: Message[] } export interface Constructor { - args: Arg[]; - docs: string[]; - label: string; - payable: boolean; - selector: string; + args: Arg[] + docs: string[] + label: string + payable: boolean + selector: string } export interface Arg { - label: string; - type: TypeRef; - docs?: string[]; - indexed?: boolean; + label: string + type: TypeRef + docs?: string[] + indexed?: boolean } export interface Event { - args: Arg[]; - docs: string[]; - label: string; + args: Arg[] + docs: string[] + label: string } export interface TypeRef { - displayName: string[]; - type: number; + displayName: string[] + type: number } export interface Message { - args: Arg[]; - docs: string[]; - label: string; - mutates: boolean; - payable: boolean; - returnType: TypeRef; - selector: string; + args: Arg[] + docs: string[] + label: string + mutates: boolean + payable: boolean + returnType: TypeRef + selector: string } export interface Storage { @@ -87,13 +87,13 @@ export namespace ContractMetadata { fields: { layout: { cell: { - key: string; - ty: number; - }; - }; - name: string; - }[]; - }; + key: string + ty: number + } + } + name: string + }[] + } } // TODO: stricter typings? Not the most necessary atm. 
@@ -109,61 +109,61 @@ export namespace ContractMetadata { return { type: "Primitive", kind: def.primitive, - }; + } } else if (def.composite) { return { type: "Struct", fields: normalizeFields(def.composite.fields), - }; + } } else if (def.variant) { return { type: "Union", members: def.variant.variants.map((variant: any) => { - const { fields, ...rest } = variant; + const { fields, ...rest } = variant const member: UnionTyDefMember = { fields: fields ? normalizeFields(fields) : [], ...rest, - }; - return member; + } + return member }), - }; + } } else if (def.tuple) { return { type: "Tuple", fields: def.tuple, - }; + } } else if (def.array) { return { type: "SizedArray", len: def.array.len, typeParam: def.array.type, - }; + } } else if (def.sequence) { return { type: "Sequence", typeParam: def.sequence.type, - }; + } } else if (def.compact) { return { type: "Compact", typeParam: def.compact.typeParam, - }; + } } else if (def.bitSequence) { return { type: "BitSequence", bitOrderType: def.bitSequence.bitOrderType, bitStoreType: def.bitSequence.bitStoreType, - }; + } } - unreachable(); + unreachable() })(), - }; + } } function normalizeFields(fields: any[]) { return fields.map(({ type: ty, ...rest }: any) => { - return { ty, ...rest }; - }); + return { ty, ...rest } + }) } export function normalize( @@ -175,10 +175,10 @@ export namespace ContractMetadata { ...v3Rest, types: types.map(fromRawTy), }, - }; + } } export function tys(contractMetadata: ContractMetadata): Ty[] { - return normalize(contractMetadata).V3.types; + return normalize(contractMetadata).V3.types } } diff --git a/frame_metadata/Era.ts b/frame_metadata/Era.ts index 82c6855d7..1741de36e 100644 --- a/frame_metadata/Era.ts +++ b/frame_metadata/Era.ts @@ -1,15 +1,15 @@ -import * as $ from "../deps/scale.ts"; +import * as $ from "../deps/scale.ts" -export type Era = { type: "Immortal" } | { type: "Mortal"; period: bigint; phase: bigint }; +export type Era = { type: "Immortal" } | { type: "Mortal"; period: bigint; phase: bigint } export namespace era { - export const immortal: Era = { type: "Immortal" }; + export const immortal: Era = { type: "Immortal" } export function mortal(period: bigint, current: bigint): Era { - const adjustedPeriod = minN(maxN(nextPowerOfTwo(period), 4n), 1n << 16n); - const phase = current % adjustedPeriod; - const quantizeFactor = maxN(adjustedPeriod >> 12n, 1n); - const quantizedPhase = phase / quantizeFactor * quantizeFactor; - return { type: "Mortal", period: adjustedPeriod, phase: quantizedPhase }; + const adjustedPeriod = minN(maxN(nextPowerOfTwo(period), 4n), 1n << 16n) + const phase = current % adjustedPeriod + const quantizeFactor = maxN(adjustedPeriod >> 12n, 1n) + const quantizedPhase = phase / quantizeFactor * quantizeFactor + return { type: "Mortal", period: adjustedPeriod, phase: quantizedPhase } } } @@ -18,27 +18,27 @@ export const $era: $.Codec = $.createCodec({ _staticSize: 2, _encode(buffer, value) { if (value.type === "Immortal") { - buffer.array[buffer.index++] = 0; + buffer.array[buffer.index++] = 0 } else { - const quantizeFactor = maxN(value.period >> 12n, 1n); + const quantizeFactor = maxN(value.period >> 12n, 1n) const encoded = minN(maxN(trailingZeroes(value.period) - 1n, 1n), 15n) - | ((value.phase / quantizeFactor) << 4n); - $.u16._encode(buffer, Number(encoded)); + | ((value.phase / quantizeFactor) << 4n) + $.u16._encode(buffer, Number(encoded)) } }, _decode(buffer) { if (buffer.array[buffer.index] === 0) { - buffer.index++; - return { type: "Immortal" }; + buffer.index++ + 
return { type: "Immortal" } } else { - const encoded = BigInt($.u16._decode(buffer)); - const period = 2n << (encoded % (1n << 4n)); - const quantizeFactor = maxN(period >> 12n, 1n); - const phase = (encoded >> 4n) * quantizeFactor; + const encoded = BigInt($.u16._decode(buffer)) + const period = 2n << (encoded % (1n << 4n)) + const quantizeFactor = maxN(period >> 12n, 1n) + const phase = (encoded >> 4n) * quantizeFactor if (period >= 4n && phase <= period) { - return { type: "Mortal", period, phase }; + return { type: "Mortal", period, phase } } else { - throw new Error("Invalid period and phase"); + throw new Error("Invalid period and phase") } } }, @@ -48,31 +48,31 @@ export const $era: $.Codec = $.createCodec({ ["Mortal", ["period", $.u64], ["phase", $.u64]], ]) ._assert, -}); +}) function maxN(a: bigint, b: bigint) { - return a > b ? a : b; + return a > b ? a : b } function minN(a: bigint, b: bigint) { - return a > b ? a : b; + return a > b ? a : b } function trailingZeroes(n: bigint) { - let i = 0n; + let i = 0n while (!(n & 1n)) { - i++; - n >>= 1n; + i++ + n >>= 1n } - return i; + return i } function nextPowerOfTwo(n: bigint) { - n--; - let p = 1n; + n-- + let p = 1n while (n > 0n) { - p <<= 1n; - n >>= 1n; + p <<= 1n + n >>= 1n } - return p; + return p } diff --git a/frame_metadata/Extrinsic.ts b/frame_metadata/Extrinsic.ts index 625c76211..c507ca0f8 100644 --- a/frame_metadata/Extrinsic.ts +++ b/frame_metadata/Extrinsic.ts @@ -1,14 +1,14 @@ -import * as $ from "../deps/scale.ts"; -import { assert } from "../deps/std/testing/asserts.ts"; -import * as H from "../hashers/mod.ts"; -import * as ss58 from "../ss58/mod.ts"; -import { Hex, hex } from "../util/mod.ts"; -import { $null, DeriveCodec } from "./Codec.ts"; -import { Metadata } from "./Metadata.ts"; +import * as $ from "../deps/scale.ts" +import { assert } from "../deps/std/testing/asserts.ts" +import * as H from "../hashers/mod.ts" +import * as ss58 from "../ss58/mod.ts" +import { Hex, hex } from "../util/mod.ts" +import { $null, DeriveCodec } from "./Codec.ts" +import { Metadata } from "./Metadata.ts" export interface MultiAddress { - type: "Id" | "Index" | "Raw" | "Address20" | "Address32"; - value: Uint8Array; + type: "Id" | "Index" | "Raw" | "Address20" | "Address32" + value: Uint8Array } // TODO: delete upon common generated core types export namespace MultiAddress { @@ -16,167 +16,167 @@ export namespace MultiAddress { return { type: "Id", value: id, - }; + } } } export interface Signature { - type: "Sr25519" | "Ed25519" | "Secp256k"; // TODO: `"Ecdsa"`?; - value: Uint8Array; + type: "Sr25519" | "Ed25519" | "Secp256k" // TODO: `"Ecdsa"`?; + value: Uint8Array } export type Signer = | ((message: Uint8Array) => Signature | Promise) - | PolkadotSigner; + | PolkadotSigner export interface PolkadotSigner { - signPayload(payload: any): Promise<{ signature: string }>; + signPayload(payload: any): Promise<{ signature: string }> } export interface Extrinsic { - protocolVersion: number; + protocolVersion: number // TODO: make generic over chain signature?: & { - address: MultiAddress; - extra: unknown[]; + address: MultiAddress + extra: unknown[] } - & ({ additional: unknown[] } | { sig: Signature }); - palletName: string; - methodName: string; - args: Record; + & ({ additional: unknown[] } | { sig: Signature }) + palletName: string + methodName: string + args: Record } interface ExtrinsicCodecProps { - metadata: Metadata; - deriveCodec: DeriveCodec; - sign: Signer; - prefix: number; + metadata: Metadata + deriveCodec: DeriveCodec 
+ sign: Signer + prefix: number } export function $extrinsic(props: ExtrinsicCodecProps): $.Codec { - const { metadata, deriveCodec } = props; - const { signedExtensions } = metadata.extrinsic; - const $sig = deriveCodec(findExtrinsicTypeParam("Signature")!) as $.Codec; - const $sigPromise = $.promise($sig); - const $address = deriveCodec(findExtrinsicTypeParam("Address")!); - const callTy = findExtrinsicTypeParam("Call")!; - assert(callTy?.type === "Union"); - const $call = deriveCodec(callTy); - const [$extra, extraPjsInfo] = getExtensionInfo(pjsExtraKeyMap, "ty"); + const { metadata, deriveCodec } = props + const { signedExtensions } = metadata.extrinsic + const $sig = deriveCodec(findExtrinsicTypeParam("Signature")!) as $.Codec + const $sigPromise = $.promise($sig) + const $address = deriveCodec(findExtrinsicTypeParam("Address")!) + const callTy = findExtrinsicTypeParam("Call")! + assert(callTy?.type === "Union") + const $call = deriveCodec(callTy) + const [$extra, extraPjsInfo] = getExtensionInfo(pjsExtraKeyMap, "ty") const [$additional, additionalPjsInfo] = getExtensionInfo( pjsAdditionalKeyMap, "additionalSigned", - ); - const pjsInfo = [...extraPjsInfo, ...additionalPjsInfo]; + ) + const pjsInfo = [...extraPjsInfo, ...additionalPjsInfo] - const toSignSize = $call._staticSize + $extra._staticSize + $additional._staticSize; - const totalSize = 1 + $address._staticSize + $sig._staticSize + toSignSize; + const toSignSize = $call._staticSize + $extra._staticSize + $additional._staticSize + const totalSize = 1 + $address._staticSize + $sig._staticSize + toSignSize const $baseExtrinsic: $.Codec = $.createCodec({ _metadata: [], _staticSize: totalSize, _encode(buffer, extrinsic) { - const firstByte = (+!!extrinsic.signature << 7) | extrinsic.protocolVersion; - buffer.array[buffer.index++] = firstByte; + const firstByte = (+!!extrinsic.signature << 7) | extrinsic.protocolVersion + buffer.array[buffer.index++] = firstByte const call = { type: extrinsic.palletName, value: { type: extrinsic.methodName, ...extrinsic.args, }, - }; - const { signature } = extrinsic; + } + const { signature } = extrinsic if (signature) { - $address._encode(buffer, signature.address); + $address._encode(buffer, signature.address) if ("additional" in signature) { - const toSignBuffer = new $.EncodeBuffer(buffer.stealAlloc(toSignSize)); - $call._encode(toSignBuffer, call); - const callEnd = toSignBuffer.finishedSize + toSignBuffer.index; + const toSignBuffer = new $.EncodeBuffer(buffer.stealAlloc(toSignSize)) + $call._encode(toSignBuffer, call) + const callEnd = toSignBuffer.finishedSize + toSignBuffer.index if ("signPayload" in props.sign) { - const exts = [...signature.extra, ...signature.additional]; - const extEnds = []; + const exts = [...signature.extra, ...signature.additional] + const extEnds = [] for (let i = 0; i < pjsInfo.length; i++) { - pjsInfo[i]!.codec._encode(toSignBuffer, exts[i]); - extEnds.push(toSignBuffer.finishedSize + toSignBuffer.index); + pjsInfo[i]!.codec._encode(toSignBuffer, exts[i]) + extEnds.push(toSignBuffer.finishedSize + toSignBuffer.index) } - const extraEnd = extEnds[extraPjsInfo.length - 1] ?? callEnd; - const toSignEncoded = toSignBuffer.finish(); - const callEncoded = toSignEncoded.subarray(0, callEnd); - const extraEncoded = toSignEncoded.subarray(callEnd, extraEnd); + const extraEnd = extEnds[extraPjsInfo.length - 1] ?? 
callEnd + const toSignEncoded = toSignBuffer.finish() + const callEncoded = toSignEncoded.subarray(0, callEnd) + const extraEncoded = toSignEncoded.subarray(callEnd, extraEnd) if (signature.address.type !== "Id") { - throw new Error("polkadot signer: address types other than Id are not supported"); + throw new Error("polkadot signer: address types other than Id are not supported") } const payload: Record = { address: ss58.encode(props.prefix, signature.address.value), method: hex.encodePrefixed(callEncoded), signedExtensions: signedExtensions.map((x) => x.ident), version: extrinsic.protocolVersion, - }; - let last = callEnd; + } + let last = callEnd for (let i = 0; i < pjsInfo.length; i++) { - const { key } = pjsInfo[i]!; - if (!key) throw new Error("polkadot signer: unknown extension"); + const { key } = pjsInfo[i]! + if (!key) throw new Error("polkadot signer: unknown extension") payload[key] = typeof exts[i] === "number" ? exts[i] - : hex.encodePrefixed(toSignEncoded.subarray(last, extEnds[i]!)); - last = extEnds[i]!; + : hex.encodePrefixed(toSignEncoded.subarray(last, extEnds[i]!)) + last = extEnds[i]! } - const signer = props.sign; + const signer = props.sign buffer.writeAsync(0, async (buffer) => { - const { signature } = await signer.signPayload(payload); - buffer.insertArray(hex.decode(signature as Hex)); - }); - buffer.insertArray(extraEncoded); - buffer.insertArray(callEncoded); + const { signature } = await signer.signPayload(payload) + buffer.insertArray(hex.decode(signature as Hex)) + }) + buffer.insertArray(extraEncoded) + buffer.insertArray(callEncoded) } else { - $extra._encode(toSignBuffer, signature.extra); - const extraEnd = toSignBuffer.finishedSize + toSignBuffer.index; - $additional._encode(toSignBuffer, signature.additional); - const toSignEncoded = toSignBuffer.finish(); - const callEncoded = toSignEncoded.subarray(0, callEnd); - const extraEncoded = toSignEncoded.subarray(callEnd, extraEnd); + $extra._encode(toSignBuffer, signature.extra) + const extraEnd = toSignBuffer.finishedSize + toSignBuffer.index + $additional._encode(toSignBuffer, signature.additional) + const toSignEncoded = toSignBuffer.finish() + const callEncoded = toSignEncoded.subarray(0, callEnd) + const extraEncoded = toSignEncoded.subarray(callEnd, extraEnd) const toSign = toSignEncoded.length > 256 ? H.Blake2_256.hash(toSignEncoded) - : toSignEncoded; - const sig = props.sign(toSign); + : toSignEncoded + const sig = props.sign(toSign) if (sig instanceof Promise) { - $sigPromise._encode(buffer, sig); + $sigPromise._encode(buffer, sig) } else { - $sig._encode(buffer, sig); + $sig._encode(buffer, sig) } - buffer.insertArray(extraEncoded); - buffer.insertArray(callEncoded); + buffer.insertArray(extraEncoded) + buffer.insertArray(callEncoded) } } else { - $sig._encode(buffer, signature.sig); - $extra._encode(buffer, signature.extra); - $call._encode(buffer, call); + $sig._encode(buffer, signature.sig) + $extra._encode(buffer, signature.extra) + $call._encode(buffer, call) } } else { - $call._encode(buffer, call); + $call._encode(buffer, call) } }, _decode(buffer) { - const firstByte = buffer.array[buffer.index++]!; - const hasSignature = firstByte & (1 << 7); - const protocolVersion = firstByte & ~(1 << 7); - let signature: Extrinsic["signature"]; + const firstByte = buffer.array[buffer.index++]! 
+ const hasSignature = firstByte & (1 << 7) + const protocolVersion = firstByte & ~(1 << 7) + let signature: Extrinsic["signature"] if (hasSignature) { - const address = $address._decode(buffer) as MultiAddress; - const sig = $sig._decode(buffer); - const extra = $extra._decode(buffer); - signature = { address, sig, extra }; + const address = $address._decode(buffer) as MultiAddress + const sig = $sig._decode(buffer) + const extra = $extra._decode(buffer) + signature = { address, sig, extra } } - const call = $call._decode(buffer) as any; - const { type: palletName, value: { type: methodName, ...args } } = call; - return { protocolVersion, signature, palletName, methodName, args }; + const call = $call._decode(buffer) as any + const { type: palletName, value: { type: methodName, ...args } } = call + return { protocolVersion, signature, palletName, methodName, args } }, _assert(assert) { - assert.typeof(this, "object"); + assert.typeof(this, "object") assert .key(this, "protocolVersion") - .equals($.u8, 4); - const value_ = assert.value as any; + .equals($.u8, 4) + const value_ = assert.value as any // TODO: use `assert.key(this, "call")` upon merging https://github.com/paritytech/capi/pull/368 $call._assert( new $.AssertState({ @@ -186,28 +186,28 @@ export function $extrinsic(props: ExtrinsicCodecProps): $.Codec { ...value_.args, }, }), - ); + ) if (value_.signature) { - const signatureAssertState = assert.key(this, "signature"); - signatureAssertState.key($address, "address"); - signatureAssertState.key($extra, "extra"); + const signatureAssertState = assert.key(this, "signature") + signatureAssertState.key($address, "address") + signatureAssertState.key($extra, "extra") if ("additional" in signatureAssertState) { - signatureAssertState.key($additional, "additional"); + signatureAssertState.key($additional, "additional") } if ("sig" in signatureAssertState) { - signatureAssertState.key($sig, "sig"); + signatureAssertState.key($sig, "sig") } } }, - }); + }) return $.withMetadata( $.metadata("$extrinsic", $extrinsic, props), $.lenPrefixed($baseExtrinsic), - ); + ) function findExtrinsicTypeParam(name: string) { - return metadata.extrinsic.ty.params.find((x) => x.name === name)?.ty; + return metadata.extrinsic.ty.params.find((x) => x.name === name)?.ty } function getExtensionInfo( keyMap: Record, @@ -215,8 +215,8 @@ export function $extrinsic(props: ExtrinsicCodecProps): $.Codec { ): [codec: $.Codec, pjsInfo: { key: string | undefined; codec: $.Codec }[]] { const pjsInfo = signedExtensions .map((e) => ({ key: keyMap[e.ident], codec: deriveCodec(e[key]) })) - .filter((x) => x.codec !== $null); - return [$.tuple(...pjsInfo.map((x) => x.codec)), pjsInfo]; + .filter((x) => x.codec !== $null) + return [$.tuple(...pjsInfo.map((x) => x.codec)), pjsInfo] } } @@ -225,7 +225,7 @@ const pjsExtraKeyMap: Record = { CheckMortality: "era", ChargeTransactionPayment: "tip", CheckNonce: "nonce", -}; +} const pjsAdditionalKeyMap: Record = { CheckEra: "blockHash", @@ -234,4 +234,4 @@ const pjsAdditionalKeyMap: Record = { CheckTxVersion: "transactionVersion", CheckVersion: "specVersion", CheckGenesis: "genesisHash", -}; +} diff --git a/frame_metadata/Key.test.ts b/frame_metadata/Key.test.ts index f6c4b9425..782384a77 100644 --- a/frame_metadata/Key.test.ts +++ b/frame_metadata/Key.test.ts @@ -1,67 +1,67 @@ -import { assertEquals } from "../deps/std/testing/asserts.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; -import { $storageKey } from "./Key.ts"; -import { 
getPalletAndEntry } from "./Metadata.ts"; -import { setup } from "./test-common.ts"; +import { assertEquals } from "../deps/std/testing/asserts.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" +import { $storageKey } from "./Key.ts" +import { getPalletAndEntry } from "./Metadata.ts" +import { setup } from "./test-common.ts" Deno.test("System Accounts Key", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); - const systemAccountPalletAndEntry = getPalletAndEntry(metadata, "System", "Account"); - if (systemAccountPalletAndEntry instanceof Error) throw systemAccountPalletAndEntry; - const [pallet, storageEntry] = systemAccountPalletAndEntry; + const [metadata, deriveCodec] = await setup("polkadot") + const systemAccountPalletAndEntry = getPalletAndEntry(metadata, "System", "Account") + if (systemAccountPalletAndEntry instanceof Error) throw systemAccountPalletAndEntry + const [pallet, storageEntry] = systemAccountPalletAndEntry const $key = $storageKey({ deriveCodec, pallet, storageEntry, - }); - const partialKey: unknown[] = []; + }) + const partialKey: unknown[] = [] assertEquals( U.hex.encode($key.encode(partialKey)), "26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9", - ); - const key = [T.alice.publicKey]; - const encoded = $key.encode(key); + ) + const key = [T.alice.publicKey] + const encoded = $key.encode(key) assertEquals( U.hex.encode(encoded), "26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9de1e86a9a8c739864cf3cc5ec2bea59fd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d", - ); - const decoded = $key.decode(encoded); - assertEquals(decoded, key); -}); + ) + const decoded = $key.decode(encoded) + assertEquals(decoded, key) +}) Deno.test("Auction Winning Key", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); - const [pallet, storageEntry] = U.throwIfError(getPalletAndEntry(metadata, "Auctions", "Winning")); + const [metadata, deriveCodec] = await setup("polkadot") + const [pallet, storageEntry] = U.throwIfError(getPalletAndEntry(metadata, "Auctions", "Winning")) const $key = $storageKey({ deriveCodec, pallet, storageEntry, - }); - const key = [5]; - const encoded = $key.encode(key); + }) + const key = [5] + const encoded = $key.encode(key) assertEquals( U.hex.encode(encoded), "ca32a41f4b3ed515863dc0a38697f84e4a20667fb1dc58cb22bcadfd9ab7f67c39b9d2792f8bd4c305000000", - ); - const decoded = $key.decode(encoded); - assertEquals(key, decoded); -}); + ) + const decoded = $key.decode(encoded) + assertEquals(key, decoded) +}) Deno.test("Multisig Multisigs partial storage Key", async () => { - const [metadata, deriveCodec] = await setup("polkadot"); + const [metadata, deriveCodec] = await setup("polkadot") const [pallet, storageEntry] = U.throwIfError( getPalletAndEntry(metadata, "Multisig", "Multisigs"), - ); + ) const $key = $storageKey({ deriveCodec, pallet, storageEntry, - }); - const key = [T.alice.publicKey]; - const encoded = $key.encode(key); + }) + const key = [T.alice.publicKey] + const encoded = $key.encode(key) assertEquals( U.hex.encode(encoded), "7474449cca95dc5d0c00e71735a6d17d3cd15a3fd6e04e47bee3922dbfa92c8d518366b5b1bc7c99d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d", - ); -}); + ) +}) diff --git a/frame_metadata/Key.ts b/frame_metadata/Key.ts index c36671aea..b68b23a05 100644 --- a/frame_metadata/Key.ts +++ b/frame_metadata/Key.ts @@ -1,63 +1,63 @@ -import * as $ from "../deps/scale.ts"; -import * as H from "../hashers/mod.ts"; 
-import { DeriveCodec } from "./Codec.ts"; -import * as M from "./Metadata.ts"; +import * as $ from "../deps/scale.ts" +import * as H from "../hashers/mod.ts" +import { DeriveCodec } from "./Codec.ts" +import * as M from "./Metadata.ts" -export type HasherLookup = { [_ in M.HasherKind]: (input: Uint8Array) => Uint8Array }; +export type HasherLookup = { [_ in M.HasherKind]: (input: Uint8Array) => Uint8Array } export interface StorageKeyProps { - deriveCodec: DeriveCodec; - pallet: M.Pallet; - storageEntry: M.StorageEntry; + deriveCodec: DeriveCodec + pallet: M.Pallet + storageEntry: M.StorageEntry } export function $storageKey(props: StorageKeyProps): $.Codec { - let keyCodecs: $.Codec[]; + let keyCodecs: $.Codec[] if (props.storageEntry.type === "Map") { - const codec = props.deriveCodec(props.storageEntry.key); + const codec = props.deriveCodec(props.storageEntry.key) if (props.storageEntry.hashers.length === 1) { - keyCodecs = [codec]; + keyCodecs = [codec] } else { if (codec._metadata[0]?.factory !== $.tuple) { - throw new Error("Expected key codec to be a tuple since there are multiple hashers"); + throw new Error("Expected key codec to be a tuple since there are multiple hashers") } - keyCodecs = codec._metadata[0]!.args; + keyCodecs = codec._metadata[0]!.args } } else { - keyCodecs = []; + keyCodecs = [] } - const palletHash = H.Twox128.hash(new TextEncoder().encode(props.pallet.name)); - const entryHash = H.Twox128.hash(new TextEncoder().encode(props.storageEntry.name)); + const palletHash = H.Twox128.hash(new TextEncoder().encode(props.pallet.name)) + const entryHash = H.Twox128.hash(new TextEncoder().encode(props.storageEntry.name)) const $keys = [...Array(keyCodecs.length + 1).keys()].reduce( (keys, i) => { keys[i] = $.tuple( ...keyCodecs.slice(0, i).map(($key, i) => H[(props.storageEntry as M.MapStorageEntryType).hashers[i]!].$hash($key) ), - ); - return keys; + ) + return keys }, {} as Record>, - ); + ) return $.createCodec({ _metadata: $.metadata("$storageKey", $storageKey, props), _staticSize: $keys[Object.values($keys).length - 1]!._staticSize, _encode(buffer, key) { - buffer.insertArray(palletHash); - buffer.insertArray(entryHash); - if (key.length === 0) return; - $keys[key.length]!._encode(buffer, key); + buffer.insertArray(palletHash) + buffer.insertArray(entryHash) + if (key.length === 0) return + $keys[key.length]!._encode(buffer, key) }, _decode(buffer) { // Ignore initial hashes - buffer.index += 32; - return $keys[Object.values($keys).length - 1]!._decode(buffer); + buffer.index += 32 + return $keys[Object.values($keys).length - 1]!._decode(buffer) }, _assert(assert) { - assert.instanceof(this, Array); - const value = assert.value as unknown[]; - if (value.length === 0) return; - $keys[value.length]!._assert(assert); + assert.instanceof(this, Array) + const value = assert.value as unknown[] + if (value.length === 0) return + $keys[value.length]!._assert(assert) }, - }); + }) } diff --git a/frame_metadata/Metadata.test.ts b/frame_metadata/Metadata.test.ts index 5eb167e53..3821dc68e 100644 --- a/frame_metadata/Metadata.test.ts +++ b/frame_metadata/Metadata.test.ts @@ -1,9 +1,9 @@ -import { _format } from "https://deno.land/std@0.158.0/path/_util.ts"; -import { assertSnapshot } from "../deps/std/testing/snapshot.ts"; -import { Metadata } from "./Metadata.ts"; -import { setup } from "./test-common.ts"; +import { _format } from "https://deno.land/std@0.158.0/path/_util.ts" +import { assertSnapshot } from "../deps/std/testing/snapshot.ts" +import { Metadata } from 
"./Metadata.ts" +import { setup } from "./test-common.ts" -const kInspect = Symbol.for("Deno.customInspect"); +const kInspect = Symbol.for("Deno.customInspect") for ( const name of [ @@ -17,42 +17,42 @@ for ( ] as const ) { Deno.test(name, async (t) => { - const [metadata] = await setup(name); - await assertSnapshot(t, serializeMetadata(metadata)); - }); + const [metadata] = await setup(name) + await assertSnapshot(t, serializeMetadata(metadata)) + }) } // Logging the metadata directly yields a finite but pathologically large string. // This inspect logic shows the expanded form of types only in the top level of the tys array. // In all other places, it uses an abbreviated form with the id and when applicable the type name. function serializeMetadata(metadata: Metadata): Metadata { - let shouldAbbrev = true; + let shouldAbbrev = true // @ts-ignore . metadata.tys[kInspect] = (inspect: any, args: any) => { - shouldAbbrev = false; - const result = inspect([...metadata.tys], args); - shouldAbbrev = true; - return result; - }; + shouldAbbrev = false + const result = inspect([...metadata.tys], args) + shouldAbbrev = true + return result + } for (const ty of metadata.tys) { const abbrev = `Ty#${ty.id}` + (ty.path?.length ? ` (${ty.path.join("::")})` : ty.type === "Primitive" ? ` (${ty.kind})` - : ""); + : "") // @ts-ignore . ty[kInspect] = (inspect: any, args: any) => { if (shouldAbbrev) { - return abbrev; + return abbrev } - shouldAbbrev = true; - const ty2 = { __proto__: { [Symbol.toStringTag]: abbrev }, ...ty }; + shouldAbbrev = true + const ty2 = { __proto__: { [Symbol.toStringTag]: abbrev }, ...ty } // @ts-ignore . - delete ty2[kInspect]; - const result = inspect(ty2, args); - shouldAbbrev = false; - return result; - }; + delete ty2[kInspect] + const result = inspect(ty2, args) + shouldAbbrev = false + return result + } } - return metadata; + return metadata } diff --git a/frame_metadata/Metadata.ts b/frame_metadata/Metadata.ts index 8cf46cd7d..8a528bb7e 100644 --- a/frame_metadata/Metadata.ts +++ b/frame_metadata/Metadata.ts @@ -1,8 +1,8 @@ -import * as $ from "../deps/scale.ts"; -import * as U from "../util/mod.ts"; -import { $tyId, $tys, Ty } from "./scale_info.ts"; +import * as $ from "../deps/scale.ts" +import * as U from "../util/mod.ts" +import { $tyId, $tys, Ty } from "./scale_info.ts" -export type HasherKind = $.Native; +export type HasherKind = $.Native const $hasherKind = $.stringUnion([ "Blake2_128", "Blake2_256", @@ -11,27 +11,27 @@ const $hasherKind = $.stringUnion([ "Twox256", "Twox64Concat", "Identity", -]); +]) -export type StorageEntryModifier = $.Native; +export type StorageEntryModifier = $.Native export const $storageEntryModifier = $.stringUnion([ "Optional", "Default", -]); +]) export interface PlainStorageEntryType { - type: "Plain"; - value: Ty; + type: "Plain" + value: Ty } export interface MapStorageEntryType { - type: "Map"; - hashers: HasherKind[]; - key: Ty; - value: Ty; + type: "Map" + hashers: HasherKind[] + key: Ty + value: Ty } -export type StorageEntryType = PlainStorageEntryType | MapStorageEntryType; +export type StorageEntryType = PlainStorageEntryType | MapStorageEntryType export const $storageEntryType: $.Codec = $.taggedUnion("type", [ ["Plain", ["value", $tyId]], @@ -41,14 +41,14 @@ export const $storageEntryType: $.Codec = $.taggedUnion("type" ["key", $tyId], ["value", $tyId], ], -]); +]) export type StorageEntry = { - name: string; - modifier: StorageEntryModifier; - default: Uint8Array; - docs: string[]; -} & StorageEntryType; + name: string + 
modifier: StorageEntryModifier + default: Uint8Array + docs: string[] +} & StorageEntryType export const $storageEntry: $.Codec = $.spread( $.spread( @@ -62,38 +62,38 @@ export const $storageEntry: $.Codec = $.spread( ["default", $.uint8Array], ["docs", $.array($.str)], ), -); +) export interface Storage { - prefix: string; - entries: StorageEntry[]; + prefix: string + entries: StorageEntry[] } export const $storage: $.Codec = $.object( ["prefix", $.str], ["entries", $.array($storageEntry)], -); +) export interface Constant { - name: string; - ty: Ty; - value: Uint8Array; - docs: string[]; + name: string + ty: Ty + value: Uint8Array + docs: string[] } export const $constant: $.Codec = $.object( ["name", $.str], ["ty", $tyId], ["value", $.uint8Array], ["docs", $.array($.str)], -); +) export interface Pallet { - name: string; - storage: Storage | undefined; - calls: Ty | undefined; - event: Ty | undefined; - constants: Constant[]; - error: Ty | undefined; - i: number; + name: string + storage: Storage | undefined + calls: Ty | undefined + event: Ty | undefined + constants: Constant[] + error: Ty | undefined + i: number } export const $pallet: $.Codec = $.object( ["name", $.str], @@ -103,39 +103,39 @@ export const $pallet: $.Codec = $.object( ["constants", $.array($constant)], ["error", $.option($tyId)], ["i", $.u8], -); +) export interface SignedExtensionMetadata { - ident: string; - ty: Ty; - additionalSigned: Ty; + ident: string + ty: Ty + additionalSigned: Ty } export const $signedExtensionMetadata: $.Codec = $.object( ["ident", $.str], ["ty", $tyId], ["additionalSigned", $tyId], -); +) export interface ExtrinsicDef { - ty: Ty; - version: number; - signedExtensions: SignedExtensionMetadata[]; + ty: Ty + version: number + signedExtensions: SignedExtensionMetadata[] } export const $extrinsicDef: $.Codec = $.object( ["ty", $tyId], ["version", $.u8], ["signedExtensions", $.array($signedExtensionMetadata)], -); +) // https://docs.substrate.io/v3/runtime/metadata/#encoded-metadata-format -export const magicNumber = 1635018093; +export const magicNumber = 1635018093 export interface Metadata { - magicNumber: typeof magicNumber; - version: 14; - tys: Ty[]; - pallets: Pallet[]; - extrinsic: ExtrinsicDef; + magicNumber: typeof magicNumber + version: 14 + tys: Ty[] + pallets: Pallet[] + extrinsic: ExtrinsicDef } export const $metadata: $.Codec = $.object( ["magicNumber", $.constant(magicNumber, $.u32)], @@ -143,31 +143,31 @@ export const $metadata: $.Codec = $.object( ["tys", $tys], ["pallets", $.array($pallet)], ["extrinsic", $extrinsicDef], -); +) export function fromPrefixedHex(scaleEncoded: string): Metadata { - return $metadata.decode(U.hex.decode(scaleEncoded as U.Hex)); + return $metadata.decode(U.hex.decode(scaleEncoded as U.Hex)) } export function getPallet(metadata: Metadata, name: string): Pallet | PalletNotFoundError { - return metadata.pallets.find((pallet) => pallet.name === name) || new PalletNotFoundError(); + return metadata.pallets.find((pallet) => pallet.name === name) || new PalletNotFoundError() } export class PalletNotFoundError extends Error { - override readonly name = "PalletNotFoundError"; + override readonly name = "PalletNotFoundError" } export function getEntry(pallet: Pallet, name: string): StorageEntry | EntryNotFoundError { - return pallet.storage?.entries.find((entry) => entry.name === name) || new EntryNotFoundError(); + return pallet.storage?.entries.find((entry) => entry.name === name) || new EntryNotFoundError() } export class EntryNotFoundError extends Error { - 
override readonly name = "EntryNotFoundError"; + override readonly name = "EntryNotFoundError" } export function getConst(pallet: Pallet, name: string): Constant | ConstNotFoundError { - return pallet.constants?.find((constant) => constant.name === name) || new ConstNotFoundError(); + return pallet.constants?.find((constant) => constant.name === name) || new ConstNotFoundError() } export class ConstNotFoundError extends Error { - override readonly name = "ConstNotFoundError"; + override readonly name = "ConstNotFoundError" } export function getPalletAndEntry( @@ -175,13 +175,13 @@ export function getPalletAndEntry( palletName: string, entryName: string, ): [Pallet, StorageEntry] | PalletNotFoundError | EntryNotFoundError { - const pallet = getPallet(metadata, palletName); + const pallet = getPallet(metadata, palletName) if (pallet instanceof Error) { - return pallet; + return pallet } - const entry = getEntry(pallet, entryName); + const entry = getEntry(pallet, entryName) if (entry instanceof Error) { - return entry; + return entry } - return [pallet, entry]; + return [pallet, entry] } diff --git a/frame_metadata/TyVisitor.ts b/frame_metadata/TyVisitor.ts index b6a1e85eb..7215ee400 100644 --- a/frame_metadata/TyVisitor.ts +++ b/frame_metadata/TyVisitor.ts @@ -8,134 +8,134 @@ import { TupleTyDef, Ty, UnionTyDef, -} from "./scale_info.ts"; +} from "./scale_info.ts" export interface TyVisitorMethods { - unitStruct(ty: Ty & (StructTyDef | TupleTyDef)): T; - wrapperStruct(ty: Ty & (StructTyDef | TupleTyDef), inner: Ty): T; - tupleStruct(ty: Ty & (StructTyDef | TupleTyDef), members: Ty[]): T; - objectStruct(ty: Ty & StructTyDef): T; + unitStruct(ty: Ty & (StructTyDef | TupleTyDef)): T + wrapperStruct(ty: Ty & (StructTyDef | TupleTyDef), inner: Ty): T + tupleStruct(ty: Ty & (StructTyDef | TupleTyDef), members: Ty[]): T + objectStruct(ty: Ty & StructTyDef): T - option(ty: Ty & UnionTyDef, some: Ty): T; - result(ty: Ty & UnionTyDef, ok: Ty, err: Ty): T; - never(ty: Ty & UnionTyDef): T; - stringUnion(ty: Ty & UnionTyDef): T; - taggedUnion(ty: Ty & UnionTyDef): T; + option(ty: Ty & UnionTyDef, some: Ty): T + result(ty: Ty & UnionTyDef, ok: Ty, err: Ty): T + never(ty: Ty & UnionTyDef): T + stringUnion(ty: Ty & UnionTyDef): T + taggedUnion(ty: Ty & UnionTyDef): T - uint8Array?(ty: Ty & SequenceTyDef): T; - array(ty: Ty & SequenceTyDef): T; + uint8Array?(ty: Ty & SequenceTyDef): T + array(ty: Ty & SequenceTyDef): T - sizedUint8Array?(ty: Ty & SizedArrayTyDef): T; - sizedArray(ty: Ty & SizedArrayTyDef): T; + sizedUint8Array?(ty: Ty & SizedArrayTyDef): T + sizedArray(ty: Ty & SizedArrayTyDef): T - primitive(ty: Ty & PrimitiveTyDef): T; - compact(ty: Ty & CompactTyDef): T; - bitSequence(ty: Ty & BitSequenceTyDef): T; + primitive(ty: Ty & PrimitiveTyDef): T + compact(ty: Ty & CompactTyDef): T + bitSequence(ty: Ty & BitSequenceTyDef): T - map?(ty: Ty & StructTyDef, key: Ty, value: Ty): T; - set?(ty: Ty & StructTyDef, value: Ty): T; + map?(ty: Ty & StructTyDef, key: Ty, value: Ty): T + set?(ty: Ty & StructTyDef, value: Ty): T - era?(ty: Ty & UnionTyDef): T; + era?(ty: Ty & UnionTyDef): T - lenPrefixedWrapper(ty: Ty & StructTyDef, inner: Ty): T; + lenPrefixedWrapper(ty: Ty & StructTyDef, inner: Ty): T - circular(ty: Ty): T; + circular(ty: Ty): T } export interface TyVisitor extends TyVisitorMethods {} export class TyVisitor { - cache: Record = {}; + cache: Record = {} constructor( public tys: Ty[], methods: TyVisitorMethods & ThisType>, ) { - Object.assign(this, methods); + Object.assign(this, methods) } 
visit(ty: number | Ty): T { if (typeof ty === "number") { - ty = this.tys[ty]!; + ty = this.tys[ty]! } - const i = ty.id; + const i = ty.id if (this.cache[i] != null) { - return this.cache[i]!; + return this.cache[i]! } if (this.cache[i] === null) { - return this.circular(ty); + return this.circular(ty) } - this.cache[i] = null; // circularity detection - const value = this._visit(ty); - this.cache[i] = value; - return value; + this.cache[i] = null // circularity detection + const value = this._visit(ty) + this.cache[i] = value + return value } _visit(ty: Ty) { if (ty.type === "Struct") { if (this.map && ty.path[0] === "BTreeMap") { - return this.map(ty, ty.params[0]!.ty!, ty.params[1]!.ty!); + return this.map(ty, ty.params[0]!.ty!, ty.params[1]!.ty!) } else if (this.set && ty.path[0] === "BTreeSet") { - return this.set(ty, ty.params[0]!.ty!); + return this.set(ty, ty.params[0]!.ty!) } else if (ty.path.at(-1) === "WrapperOpaque" || ty.path.at(-1) === "WrapperKeepOpaque") { - return this.lenPrefixedWrapper(ty, ty.params[0]!.ty!); + return this.lenPrefixedWrapper(ty, ty.params[0]!.ty!) } else if (ty.fields.length === 0) { - return this.unitStruct(ty); + return this.unitStruct(ty) } else if (ty.fields[0]!.name === undefined) { if (ty.fields.length === 1) { - return this.wrapperStruct(ty, ty.fields[0]!.ty); + return this.wrapperStruct(ty, ty.fields[0]!.ty) } else { - return this.tupleStruct(ty, ty.fields.map((x) => x.ty)); + return this.tupleStruct(ty, ty.fields.map((x) => x.ty)) } } else { - return this.objectStruct(ty); + return this.objectStruct(ty) } } else if (ty.type === "Tuple") { if (ty.fields.length === 0) { - return this.unitStruct(ty); + return this.unitStruct(ty) } else if (ty.fields.length === 1) { - return this.wrapperStruct(ty, ty.fields[0]!); + return this.wrapperStruct(ty, ty.fields[0]!) } else { - return this.tupleStruct(ty, ty.fields); + return this.tupleStruct(ty, ty.fields) } } else if (ty.type === "Union") { // TODO: revisit Option and Result if (ty.path[0] === "Option") { - return this.option(ty, ty.params[0]!.ty!); + return this.option(ty, ty.params[0]!.ty!) } else if (ty.path[0] === "Result") { - return this.result(ty, ty.params[0]!.ty!, ty.params[1]!.ty!); + return this.result(ty, ty.params[0]!.ty!, ty.params[1]!.ty!) 
} else if (this.era && ty.path.at(-1) === "Era") { - return this.era(ty); + return this.era(ty) } else if (ty.members.length === 0) { - return this.never(ty); + return this.never(ty) } else if (ty.members.every((x) => x.fields.length === 0)) { - return this.stringUnion(ty); + return this.stringUnion(ty) } else { - return this.taggedUnion(ty); + return this.taggedUnion(ty) } } else if (ty.type === "Sequence") { if (this.uint8Array && _isU8(ty.typeParam)) { - return this.uint8Array(ty); + return this.uint8Array(ty) } else { - return this.array(ty); + return this.array(ty) } } else if (ty.type === "SizedArray") { if (this.sizedUint8Array && _isU8(ty.typeParam)) { - return this.sizedUint8Array(ty); + return this.sizedUint8Array(ty) } else { - return this.sizedArray(ty); + return this.sizedArray(ty) } } else if (ty.type === "Primitive") { - return this.primitive(ty); + return this.primitive(ty) } else if (ty.type === "Compact") { - return this.compact(ty); + return this.compact(ty) } else if (ty.type === "BitSequence") { - return this.bitSequence(ty); + return this.bitSequence(ty) } else { - throw new Error("unreachable"); + throw new Error("unreachable") } } } function _isU8(ty: Ty) { - return ty.type === "Primitive" && ty.kind === "u8"; + return ty.type === "Primitive" && ty.kind === "u8" } diff --git a/frame_metadata/mod.ts b/frame_metadata/mod.ts index 0255e01e2..e6d1eff0c 100644 --- a/frame_metadata/mod.ts +++ b/frame_metadata/mod.ts @@ -1,8 +1,8 @@ -export * from "./Codec.ts"; -export * from "./Contract.ts"; -export * from "./Era.ts"; -export * from "./Extrinsic.ts"; -export * from "./Key.ts"; -export * from "./Metadata.ts"; -export * from "./scale_info.ts"; -export * from "./TyVisitor.ts"; +export * from "./Codec.ts" +export * from "./Contract.ts" +export * from "./Era.ts" +export * from "./Extrinsic.ts" +export * from "./Key.ts" +export * from "./Metadata.ts" +export * from "./scale_info.ts" +export * from "./TyVisitor.ts" diff --git a/frame_metadata/scale_info.ts b/frame_metadata/scale_info.ts index f4e3dbfaf..7cab6a647 100644 --- a/frame_metadata/scale_info.ts +++ b/frame_metadata/scale_info.ts @@ -1,60 +1,60 @@ -import * as $ from "../deps/scale.ts"; +import * as $ from "../deps/scale.ts" export class TyDecodeCtx { - tys: Ty[] | null = null; + tys: Ty[] | null = null } -const $compactU32 = $.compact($.u32); +const $compactU32 = $.compact($.u32) export const $tys: $.Codec = $.createCodec({ _metadata: $.metadata("$tys"), _staticSize: $compactU32._staticSize, _encode(buffer, value) { - $.array($ty)._encode(buffer, value); + $.array($ty)._encode(buffer, value) }, _decode(buffer) { - const length = $compactU32._decode(buffer); - const ctx = buffer.context.get(TyDecodeCtx); - const tys = ctx.tys = Array.from({ length }, (_, id) => ({ id } as Ty)); + const length = $compactU32._decode(buffer) + const ctx = buffer.context.get(TyDecodeCtx) + const tys = ctx.tys = Array.from({ length }, (_, id) => ({ id } as Ty)) for (let i = 0; i < length; i++) { - Object.assign(tys[i]!, $ty._decode(buffer)); + Object.assign(tys[i]!, $ty._decode(buffer)) } - return tys; + return tys }, _assert(assert) { - $.array($ty)._assert(assert); + $.array($ty)._assert(assert) }, -}); +}) export const $tyId: $.Codec = $.createCodec({ _metadata: $.metadata("$tyId"), _staticSize: $compactU32._staticSize, _encode(buffer, value) { - $compactU32._encode(buffer, value.id); + $compactU32._encode(buffer, value.id) }, _decode(buffer) { - const ctx = buffer.context.get(TyDecodeCtx); - const id = $compactU32._decode(buffer); - 
return ctx.tys?.[id] ?? { id } as any; + const ctx = buffer.context.get(TyDecodeCtx) + const id = $compactU32._decode(buffer) + return ctx.tys?.[id] ?? { id } as any }, _assert(assert) { - $compactU32._assert(assert.key(this, "id")); + $compactU32._assert(assert.key(this, "id")) }, -}); +}) export interface Field { - name: string | undefined; - ty: Ty; - typeName: string | undefined; - docs: string[]; + name: string | undefined + ty: Ty + typeName: string | undefined + docs: string[] } export const $field: $.Codec = $.object( ["name", $.option($.str)], ["ty", $tyId], ["typeName", $.option($.str)], ["docs", $.array($.str)], -); +) -export type PrimitiveKind = $.Native; +export type PrimitiveKind = $.Native const $primitiveKind = $.stringUnion([ "bool", "char", @@ -71,48 +71,48 @@ const $primitiveKind = $.stringUnion([ "i64", "i128", "i256", -]); +]) -export type TyType = TyDef["type"]; +export type TyType = TyDef["type"] export interface StructTyDef { - type: "Struct"; - fields: Field[]; + type: "Struct" + fields: Field[] } export interface UnionTyDefMember { - name: string; - fields: Field[]; - index: number; - docs: string[]; + name: string + fields: Field[] + index: number + docs: string[] } export interface UnionTyDef { - type: "Union"; - members: UnionTyDefMember[]; + type: "Union" + members: UnionTyDefMember[] } export interface SequenceTyDef { - type: "Sequence"; - typeParam: Ty; + type: "Sequence" + typeParam: Ty } export interface SizedArrayTyDef { - type: "SizedArray"; - len: number; - typeParam: Ty; + type: "SizedArray" + len: number + typeParam: Ty } export interface TupleTyDef { - type: "Tuple"; - fields: Ty[]; + type: "Tuple" + fields: Ty[] } export interface PrimitiveTyDef { - type: "Primitive"; - kind: PrimitiveKind; + type: "Primitive" + kind: PrimitiveKind } export interface CompactTyDef { - type: "Compact"; - typeParam: Ty; + type: "Compact" + typeParam: Ty } export interface BitSequenceTyDef { - type: "BitSequence"; - bitOrderType: Ty; - bitStoreType: Ty; + type: "BitSequence" + bitOrderType: Ty + bitStoreType: Ty } export type TyDef = | StructTyDef @@ -122,7 +122,7 @@ export type TyDef = | TupleTyDef | PrimitiveTyDef | CompactTyDef - | BitSequenceTyDef; + | BitSequenceTyDef export const $tyDef: $.Codec = $.taggedUnion("type", [ [ "Struct", @@ -166,23 +166,23 @@ export const $tyDef: $.Codec = $.taggedUnion("type", [ ["bitOrderType", $tyId], ["bitStoreType", $tyId], ], -]); +]) export interface Param { - name: string; - ty: Ty | undefined; + name: string + ty: Ty | undefined } export const $param: $.Codec = $.object( ["name", $.str], ["ty", $.option($tyId)], -); +) export type Ty = { - id: number; - path: string[]; - params: Param[]; - docs: string[]; -} & TyDef; + id: number + path: string[] + params: Param[] + docs: string[] +} & TyDef export const $ty: $.Codec = $.spread( $.spread( $.object( @@ -195,4 +195,4 @@ export const $ty: $.Codec = $.spread( $.object( ["docs", $.array($.str)], ), -); +) diff --git a/frame_metadata/test-common.ts b/frame_metadata/test-common.ts index f5b2d963a..850d1146c 100644 --- a/frame_metadata/test-common.ts +++ b/frame_metadata/test-common.ts @@ -1,7 +1,7 @@ -import * as path from "../deps/std/path.ts"; -import * as M from "../frame_metadata/mod.ts"; +import * as path from "../deps/std/path.ts" +import * as M from "../frame_metadata/mod.ts" -const downloadedDir = new URL("../frame_metadata/_downloaded", import.meta.url).pathname; +const downloadedDir = new URL("../frame_metadata/_downloaded", import.meta.url).pathname type NetworkName = | 
"acala" @@ -10,17 +10,17 @@ type NetworkName = | "polkadot" | "statemint" | "subsocial" - | "westend"; + | "westend" -const cache: Partial> = {}; +const cache: Partial> = {} export async function setup(networkName: NetworkName): Promise<[M.Metadata, M.DeriveCodec]> { - let res = cache[networkName]; + let res = cache[networkName] if (res) { - return res; + return res } - const metadataEncoded = await Deno.readTextFile(path.join(downloadedDir, `${networkName}.scale`)); - const metadata = M.fromPrefixedHex(metadataEncoded); - res = [metadata, M.DeriveCodec(metadata.tys)]; - cache[networkName] = res; - return res; + const metadataEncoded = await Deno.readTextFile(path.join(downloadedDir, `${networkName}.scale`)) + const metadata = M.fromPrefixedHex(metadataEncoded) + res = [metadata, M.DeriveCodec(metadata.tys)] + cache[networkName] = res + return res } diff --git a/hashers/blake2b.test.ts b/hashers/blake2b.test.ts index 18d92c4e3..51812fb53 100644 --- a/hashers/blake2b.test.ts +++ b/hashers/blake2b.test.ts @@ -1,11 +1,11 @@ -import * as refImpl from "https://esm.sh/@noble/hashes@1.1.2/blake2b"; -import { assertEquals } from "../deps/std/testing/asserts.ts"; -import { hex } from "../util/mod.ts"; -import { Blake2b } from "./blake2b.ts"; +import * as refImpl from "https://esm.sh/@noble/hashes@1.1.2/blake2b" +import { assertEquals } from "../deps/std/testing/asserts.ts" +import { hex } from "../util/mod.ts" +import { Blake2b } from "./blake2b.ts" const lorem = // cspell:disable-next-line - "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."; + "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." 
const hashes: [name: string, data: Uint8Array, hash: string][] = [ [ @@ -28,26 +28,26 @@ const hashes: [name: string, data: Uint8Array, hash: string][] = [ new TextEncoder().encode(lorem), "bf6a2cf4132a6436f90190ebbf436dd622a73294d44348046c32504c224aaa3d1c80ca83107eb9548ccba36e141183b3bfb0f38779c0d9ab76b5971590e79d5e", ], -]; +] for (const [name, data, hash] of hashes) { Deno.test(`${name} reference`, () => { - assertEquals(hex.encode(refImpl.blake2b(data)), hash); - }); + assertEquals(hex.encode(refImpl.blake2b(data)), hash) + }) Deno.test(`${name} straight`, () => { - const hasher = new Blake2b(); - hasher.update(data); - assertEquals(hex.encode(hasher.digest()), hash); - hasher.dispose(); - }); + const hasher = new Blake2b() + hasher.update(data) + assertEquals(hex.encode(hasher.digest()), hash) + hasher.dispose() + }) for (const chunkSize of [1, 13, 31, 32, 33, 49, 64, 65, 113]) { Deno.test(`${name} chunked ${chunkSize}`, () => { - const hasher = new Blake2b(); + const hasher = new Blake2b() for (let i = 0; i < data.length; i += chunkSize) { - hasher.update(data.slice(i, i + chunkSize)); + hasher.update(data.slice(i, i + chunkSize)) } - assertEquals(hex.encode(hasher.digest()), hash); - hasher.dispose(); - }); + assertEquals(hex.encode(hasher.digest()), hash) + hasher.dispose() + }) } } diff --git a/hashers/blake2b.ts b/hashers/blake2b.ts index 3ba03efcf..ffecc29b3 100644 --- a/hashers/blake2b.ts +++ b/hashers/blake2b.ts @@ -1,120 +1,120 @@ -import wasmCode from "./blake2b.wasm.ts"; +import wasmCode from "./blake2b.wasm.ts" -const memory = new WebAssembly.Memory({ initial: 1, maximum: 128 }); +const memory = new WebAssembly.Memory({ initial: 1, maximum: 128 }) -const wasmModule = new WebAssembly.Module(wasmCode); +const wasmModule = new WebAssembly.Module(wasmCode) const wasmInstance = new WebAssembly.Instance(wasmModule, { blake2b: { memory }, -}); +}) interface XxhashWasm { - free_mem: WebAssembly.Global; - reset(state_adr: number, dk_len: number): void; - update(state_adr: number, msg_adr: number, msg_end: number, written: number): void; - finish(state_adr: number, msg_adr: number, written: number): void; + free_mem: WebAssembly.Global + reset(state_adr: number, dk_len: number): void + update(state_adr: number, msg_adr: number, msg_end: number, written: number): void + finish(state_adr: number, msg_adr: number, written: number): void } -const wasm = wasmInstance.exports as never as XxhashWasm; +const wasm = wasmInstance.exports as never as XxhashWasm -let memBuf = new Uint8Array(memory.buffer); -let memI = wasm.free_mem.value; +let memBuf = new Uint8Array(memory.buffer) +let memI = wasm.free_mem.value -const pool: Blake2bInner[] = []; +const pool: Blake2bInner[] = [] const finReg = new FinalizationRegistry((inner) => { - pool.push(inner); -}); + pool.push(inner) +}) export class Blake2b { - private inner = pool.pop() ?? new Blake2bInner(); + private inner = pool.pop() ?? 
new Blake2bInner() constructor(public digestSize: number = 64) { - finReg.register(this, this.inner, this.inner); - this.inner.reset(digestSize); + finReg.register(this, this.inner, this.inner) + this.inner.reset(digestSize) } update(input: Uint8Array) { - this.inner.update(input); + this.inner.update(input) } digest() { - return this.inner.digest(this.digestSize); + return this.inner.digest(this.digestSize) } digestInto(digest: Uint8Array) { - this.inner.digestInto(this.digestSize, digest); + this.inner.digestInto(this.digestSize, digest) } dispose() { - pool.push(this.inner); - finReg.unregister(this.inner); - this.inner = null!; + pool.push(this.inner) + finReg.unregister(this.inner) + this.inner = null! } } class Blake2bInner { - exLoc = 0; - adr; - exLen = 0; - written = 0; + exLoc = 0 + adr + exLen = 0 + written = 0 constructor() { - ensureAvailable(128 + 64); - this.exLoc = memI; - memI += 128; - this.adr = memI; - memI += 64; + ensureAvailable(128 + 64) + this.exLoc = memI + memI += 128 + this.adr = memI + memI += 64 } reset(digestSize: number) { - wasm.reset(this.adr, digestSize); - this.written = 0; - this.exLen = 0; + wasm.reset(this.adr, digestSize) + this.written = 0 + this.exLen = 0 } update(input: Uint8Array) { - const total = this.exLen + input.length; + const total = this.exLen + input.length if (total <= 128) { - memBuf.set(input, this.exLoc + this.exLen); - this.exLen += input.length; - return; + memBuf.set(input, this.exLoc + this.exLen) + this.exLen += input.length + return } - ensureAvailable(total); + ensureAvailable(total) if (this.exLen) { - memBuf.set(memBuf.subarray(this.exLoc, this.exLoc + this.exLen), memI); + memBuf.set(memBuf.subarray(this.exLoc, this.exLoc + this.exLen), memI) } - memBuf.set(input, memI + this.exLen); - const excess = total % 128 || 128; - wasm.update(this.adr, memI, memI + total - excess, this.written); - this.written += total - excess; + memBuf.set(input, memI + this.exLen) + const excess = total % 128 || 128 + wasm.update(this.adr, memI, memI + total - excess, this.written) + this.written += total - excess if (excess) { - memBuf.set(input.subarray(input.length - excess), this.exLoc); + memBuf.set(input.subarray(input.length - excess), this.exLoc) } - this.exLen = excess; + this.exLen = excess } _digest() { - this.written += this.exLen; - memBuf.fill(0, this.exLoc + this.exLen, this.exLoc + 128); + this.written += this.exLen + memBuf.fill(0, this.exLoc + this.exLen, this.exLoc + 128) wasm.finish( this.adr, this.exLoc, this.written, - ); + ) } digestInto(digestSize: number, digest: Uint8Array) { - this._digest(); - digest.set(memBuf.subarray(this.adr, this.adr + digestSize)); + this._digest() + digest.set(memBuf.subarray(this.adr, this.adr + digestSize)) } digest(digestSize: number) { - this._digest(); - return memBuf.slice(this.adr, this.adr + digestSize); + this._digest() + return memBuf.slice(this.adr, this.adr + digestSize) } } function ensureAvailable(length: number) { - if (memI + length <= memBuf.length) return; - memory.grow(Math.ceil((memI + length - memBuf.length) / 65536)); - memBuf = new Uint8Array(memory.buffer); + if (memI + length <= memBuf.length) return + memory.grow(Math.ceil((memI + length - memBuf.length) / 65536)) + memBuf = new Uint8Array(memory.buffer) } diff --git a/hashers/blake2b.wasm.ts b/hashers/blake2b.wasm.ts index 3e16aff0d..819912d98 100644 --- a/hashers/blake2b.wasm.ts +++ b/hashers/blake2b.wasm.ts @@ -1,6 +1,6 @@ // @generated -import { Hex, hex } from "../util/mod.ts"; +import { Hex, hex } from 
"../util/mod.ts" export default hex.decode( "\ @@ -36,4 +36,4 @@ fd0b040020002000fd0004104110fd000400fd5141d000fd000400fd51fd0b04\ 4060106838082050285010402038300828785848701860680000081018202830\ 38404850586068707870502040487868300860001058382818\ " as Hex, -); +) diff --git a/hashers/mod.test.ts b/hashers/mod.test.ts index 7d1d6a5c2..311e03185 100644 --- a/hashers/mod.test.ts +++ b/hashers/mod.test.ts @@ -1,14 +1,14 @@ -import * as $ from "../deps/scale.ts"; -import { assertEquals } from "../deps/std/testing/asserts.ts"; -import { HasherKind } from "../frame_metadata/mod.ts"; -import { hex } from "../util/mod.ts"; -import * as H from "./mod.ts"; +import * as $ from "../deps/scale.ts" +import { assertEquals } from "../deps/std/testing/asserts.ts" +import { HasherKind } from "../frame_metadata/mod.ts" +import { hex } from "../util/mod.ts" +import * as H from "./mod.ts" interface Foo { - a: Uint8Array; - b: boolean[]; - c: Promise; - d: Foo | undefined; + a: Uint8Array + b: boolean[] + c: Promise + d: Foo | undefined } const $foo: $.Codec = $.object( @@ -16,7 +16,7 @@ const $foo: $.Codec = $.object( ["b", $.array($.bool)], ["c", $.promise($.str)], ["d", $.option($.deferred(() => $foo))], -); +) const foo: Foo = { a: new Uint8Array(1024), @@ -30,10 +30,10 @@ const foo: Foo = { c: Promise.resolve("abc"), d: undefined, }, -}; +} -const encoded = await $foo.encodeAsync(foo); -const hexEncoded = hex.encode(encoded); +const encoded = await $foo.encodeAsync(foo) +const hexEncoded = hex.encode(encoded) const hashes: Record = { Blake2_128: "1f709e4fba4e77dc0e5f0d8ad9a34772", @@ -43,17 +43,17 @@ const hashes: Record = { Twox128: "ea44441eaac4e86f012f973ddc3032b0", Twox256: "ea44441eaac4e86f012f973ddc3032b09ffb7852c4e93f2a9e6284582996b4f6", Twox64Concat: "ea44441eaac4e86f" + hexEncoded, -}; +} for (const hasherKind in hashes) { Deno.test(hasherKind, async () => { - const hasher = H[hasherKind as HasherKind]; - const hash = hashes[hasherKind as HasherKind]; - assertEquals(hex.encode(hasher.hash(encoded)), hash); - const hashData = await hasher.$hash($foo).encodeAsync(foo); - assertEquals(hex.encode(hashData), hash); + const hasher = H[hasherKind as HasherKind] + const hash = hashes[hasherKind as HasherKind] + assertEquals(hex.encode(hasher.hash(encoded)), hash) + const hashData = await hasher.$hash($foo).encodeAsync(foo) + assertEquals(hex.encode(hashData), hash) if (hasher.concat) { - assertEquals(hasher.$hash($foo).decode(hashData), foo); + assertEquals(hasher.$hash($foo).decode(hashData), foo) } - }); + }) } diff --git a/hashers/mod.ts b/hashers/mod.ts index d97748b13..096e6e06e 100644 --- a/hashers/mod.ts +++ b/hashers/mod.ts @@ -1,27 +1,27 @@ -import * as $ from "../deps/scale.ts"; -import { EncodeBuffer } from "../deps/scale.ts"; -import { Blake2b } from "./blake2b.ts"; -import { Xxhash } from "./xxhash.ts"; +import * as $ from "../deps/scale.ts" +import { EncodeBuffer } from "../deps/scale.ts" +import { Blake2b } from "./blake2b.ts" +import { Xxhash } from "./xxhash.ts" export abstract class Hasher { - abstract create(): Hashing; - abstract digestLength: number; - abstract concat: boolean; + abstract create(): Hashing + abstract digestLength: number + abstract concat: boolean $hash($inner: $.Codec): $.Codec { - return $hash(this, $inner); + return $hash(this, $inner) } hash(data: Uint8Array): Uint8Array { - const output = new Uint8Array(this.digestLength + (this.concat ? 
data.length : 0)); - const hashing = this.create(); - hashing.update(data); - hashing.digestInto(output); - hashing.dispose?.(); + const output = new Uint8Array(this.digestLength + (this.concat ? data.length : 0)) + const hashing = this.create() + hashing.update(data) + hashing.digestInto(output) + hashing.dispose?.() if (this.concat) { - output.set(data, this.digestLength); + output.set(data, this.digestLength) } - return output; + return output } } @@ -30,101 +30,101 @@ function $hash(hasher: Hasher, $inner: $.Codec): $.Codec { _metadata: $.metadata("$hash", $hash, hasher, $inner), _staticSize: hasher.digestLength + $inner._staticSize, _encode(buffer, value) { - const hashArray = buffer.array.subarray(buffer.index, buffer.index += hasher.digestLength); + const hashArray = buffer.array.subarray(buffer.index, buffer.index += hasher.digestLength) const cursor = hasher.concat ? buffer.createCursor($inner._staticSize) - : new EncodeBuffer(buffer.stealAlloc($inner._staticSize)); - $inner._encode(cursor, value); + : new EncodeBuffer(buffer.stealAlloc($inner._staticSize)) + $inner._encode(cursor, value) buffer.waitForBuffer(cursor, () => { - if (hasher.concat) (cursor as ReturnType).close(); - else cursor._commitWritten(); - const hashing = hasher.create(); - updateHashing(hashing, cursor); - hashing.digestInto(hashArray); - hashing.dispose?.(); - }); + if (hasher.concat) (cursor as ReturnType).close() + else cursor._commitWritten() + const hashing = hasher.create() + updateHashing(hashing, cursor) + hashing.digestInto(hashArray) + hashing.dispose?.() + }) }, _decode(buffer) { - if (!hasher.concat) throw new DecodeNonTransparentKeyError(); - buffer.index += hasher.digestLength; - return $inner._decode(buffer); + if (!hasher.concat) throw new DecodeNonTransparentKeyError() + buffer.index += hasher.digestLength + return $inner._decode(buffer) }, _assert(assert) { - $inner._assert(assert); + $inner._assert(assert) }, - }); + }) } export class Blake2Hasher extends Hasher { - digestLength; + digestLength constructor(size: 128 | 256, public concat: boolean) { - super(); - this.digestLength = size / 8; + super() + this.digestLength = size / 8 } create(): Hashing { - return new Blake2b(this.digestLength); + return new Blake2b(this.digestLength) } } export class IdentityHasher extends Hasher { - digestLength = 0; - concat = true; + digestLength = 0 + concat = true create(): Hashing { return { update() {}, digestInto() {}, - }; + } } override $hash($inner: $.Codec): $.Codec { - return $inner; + return $inner } override hash(data: Uint8Array): Uint8Array { - return data.slice(); + return data.slice() } } export class TwoxHasher extends Hasher { - digestLength; - rounds; + digestLength + rounds constructor(size: 64 | 128 | 256, public concat: boolean) { - super(); - this.digestLength = size / 8; - this.rounds = size / 64; + super() + this.digestLength = size / 8 + this.rounds = size / 64 } create(): Hashing { - return new Xxhash(this.rounds); + return new Xxhash(this.rounds) } } export interface Hashing { - update(data: Uint8Array): void; - digestInto(array: Uint8Array): void; - dispose?(): void; + update(data: Uint8Array): void + digestInto(array: Uint8Array): void + dispose?(): void } -export const Blake2_128 = new Blake2Hasher(128, false); -export const Blake2_128Concat = new Blake2Hasher(128, true); -export const Blake2_256 = new Blake2Hasher(256, false); -export const Identity = new IdentityHasher(); -export const Twox128 = new TwoxHasher(128, false); -export const Twox256 = new TwoxHasher(256, false); 
-export const Twox64Concat = new TwoxHasher(64, true); +export const Blake2_128 = new Blake2Hasher(128, false) +export const Blake2_128Concat = new Blake2Hasher(128, true) +export const Blake2_256 = new Blake2Hasher(256, false) +export const Identity = new IdentityHasher() +export const Twox128 = new TwoxHasher(128, false) +export const Twox256 = new TwoxHasher(256, false) +export const Twox64Concat = new TwoxHasher(64, true) function updateHashing(hashing: Hashing, data: EncodeBuffer) { for (const array of data.finishedArrays) { if (array instanceof EncodeBuffer) { - updateHashing(hashing, array); + updateHashing(hashing, array) } else { - hashing.update(array); + hashing.update(array) } } } export class DecodeNonTransparentKeyError extends Error { - override readonly name = "DecodeNonTransparentKeyError"; + override readonly name = "DecodeNonTransparentKeyError" } diff --git a/hashers/xxhash.test.ts b/hashers/xxhash.test.ts index 6adf76fdd..1dcea4d5e 100644 --- a/hashers/xxhash.test.ts +++ b/hashers/xxhash.test.ts @@ -1,11 +1,11 @@ -import * as refImpl from "https://esm.sh/@polkadot/util-crypto@10.1.6/xxhash/index.js"; -import { assertEquals } from "../deps/std/testing/asserts.ts"; -import { hex } from "../util/mod.ts"; -import { Xxhash } from "./xxhash.ts"; +import * as refImpl from "https://esm.sh/@polkadot/util-crypto@10.1.6/xxhash/index.js" +import { assertEquals } from "../deps/std/testing/asserts.ts" +import { hex } from "../util/mod.ts" +import { Xxhash } from "./xxhash.ts" const lorem = // cspell:disable-next-line - "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."; + "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." 
const hashes: [name: string, data: Uint8Array, hash: string][] = [ [ @@ -28,31 +28,31 @@ const hashes: [name: string, data: Uint8Array, hash: string][] = [ new TextEncoder().encode(lorem), "3056764314b1a8c5331511d478ff3f1203586c1a9308a54594234d9956fa6173dde73c6f4d92a266b80f4ea640fc58880baa7ba549315bf42b06d60439cdd4cc", ], -]; +] for (const [name, data, fullHash] of hashes) { for (const size of [64, 128, 256, 512] as const) { - const hash = fullHash.slice(0, size / 4); - const rounds = size / 64; + const hash = fullHash.slice(0, size / 4) + const rounds = size / 64 Deno.test(`${name} ${size} reference`, () => { - assertEquals(refImpl.xxhashAsHex(data, size).slice(2), hash); - }); + assertEquals(refImpl.xxhashAsHex(data, size).slice(2), hash) + }) Deno.test(`${name} ${size} straight`, () => { - const hasher = new Xxhash(rounds); - hasher.update(data); - assertEquals(hex.encode(hasher.digest()), hash); - hasher.dispose(); - }); + const hasher = new Xxhash(rounds) + hasher.update(data) + assertEquals(hex.encode(hasher.digest()), hash) + hasher.dispose() + }) if (size === 512) { for (const chunkSize of [1, 13, 31, 32, 33, 49, 64, 65, 113]) { Deno.test(`${name} ${size} chunked ${chunkSize}`, () => { - const hasher = new Xxhash(rounds); + const hasher = new Xxhash(rounds) for (let i = 0; i < data.length; i += chunkSize) { - hasher.update(data.slice(i, i + chunkSize)); + hasher.update(data.slice(i, i + chunkSize)) } - assertEquals(hex.encode(hasher.digest()), hash); - hasher.dispose(); - }); + assertEquals(hex.encode(hasher.digest()), hash) + hasher.dispose() + }) } } } diff --git a/hashers/xxhash.ts b/hashers/xxhash.ts index 4f09b0b3e..62cabe4eb 100644 --- a/hashers/xxhash.ts +++ b/hashers/xxhash.ts @@ -1,18 +1,18 @@ -import wasmCode from "./xxhash.wasm.ts"; +import wasmCode from "./xxhash.wasm.ts" -const memory = new WebAssembly.Memory({ initial: 1, maximum: 128 }); +const memory = new WebAssembly.Memory({ initial: 1, maximum: 128 }) -const wasmModule = new WebAssembly.Module(wasmCode); +const wasmModule = new WebAssembly.Module(wasmCode) const wasmInstance = new WebAssembly.Instance(wasmModule, { xxhash: { memory }, -}); +}) interface XxhashWasm { - max_rounds: WebAssembly.Global; - free_mem: WebAssembly.Global; - init_mod(): void; - reset(rounds: number, state_adr: number): void; - update(rounds: number, state_adr: number, pos: number, end: number): void; + max_rounds: WebAssembly.Global + free_mem: WebAssembly.Global + init_mod(): void + reset(rounds: number, state_adr: number): void + update(rounds: number, state_adr: number, pos: number, end: number): void digest( rounds: number, state_adr: number, @@ -20,92 +20,92 @@ interface XxhashWasm { pos: number, end: number, digest_adr: number, - ): void; + ): void } -const wasm = wasmInstance.exports as never as XxhashWasm; -wasm.init_mod(); +const wasm = wasmInstance.exports as never as XxhashWasm +wasm.init_mod() -const maxRounds = wasm.max_rounds.value; +const maxRounds = wasm.max_rounds.value -let memBuf = new Uint8Array(memory.buffer); -let memI = wasm.free_mem.value; +let memBuf = new Uint8Array(memory.buffer) +let memI = wasm.free_mem.value -const pool: XxhashInner[] = []; +const pool: XxhashInner[] = [] const finReg = new FinalizationRegistry((inner) => { - pool.push(inner); -}); + pool.push(inner) +}) export class Xxhash { - private inner = pool.pop() ?? new XxhashInner(); + private inner = pool.pop() ?? 
new XxhashInner() constructor(public rounds: number) { - finReg.register(this, this.inner, this.inner); - this.inner.reset(rounds); + finReg.register(this, this.inner, this.inner) + this.inner.reset(rounds) } update(input: Uint8Array) { - this.inner.update(this.rounds, input); + this.inner.update(this.rounds, input) } digest() { - return this.inner.digest(this.rounds); + return this.inner.digest(this.rounds) } digestInto(digest: Uint8Array) { - this.inner.digestInto(this.rounds, digest); + this.inner.digestInto(this.rounds, digest) } dispose() { - pool.push(this.inner); - finReg.unregister(this.inner); - this.inner = null!; + pool.push(this.inner) + finReg.unregister(this.inner) + this.inner = null! } } class XxhashInner { - exLoc = 0; - adr; - exLen = 0; - written = 0; + exLoc = 0 + adr + exLen = 0 + written = 0 constructor() { - ensureAvailable(32 + maxRounds * 32); - this.exLoc = memI; - memI += 32; - this.adr = memI; - memI += maxRounds * 32; + ensureAvailable(32 + maxRounds * 32) + this.exLoc = memI + memI += 32 + this.adr = memI + memI += maxRounds * 32 } reset(rounds: number) { - wasm.reset(rounds, this.adr); - this.written = 0; - this.exLen = 0; + wasm.reset(rounds, this.adr) + this.written = 0 + this.exLen = 0 } update(rounds: number, input: Uint8Array) { - this.written += input.length; - const total = this.exLen + input.length; + this.written += input.length + const total = this.exLen + input.length if (total < 32) { - memBuf.set(input, this.exLoc + this.exLen); - this.exLen += input.length; - return; + memBuf.set(input, this.exLoc + this.exLen) + this.exLen += input.length + return } - ensureAvailable(total); + ensureAvailable(total) if (this.exLen) { - memBuf.set(memBuf.subarray(this.exLoc, this.exLoc + this.exLen), memI); + memBuf.set(memBuf.subarray(this.exLoc, this.exLoc + this.exLen), memI) } - memBuf.set(input, memI + this.exLen); - const excess = total % 32; - wasm.update(rounds, this.adr, memI, memI + total - excess); + memBuf.set(input, memI + this.exLen) + const excess = total % 32 + wasm.update(rounds, this.adr, memI, memI + total - excess) if (excess) { - memBuf.set(input.subarray(input.length - excess), this.exLoc); + memBuf.set(input.subarray(input.length - excess), this.exLoc) } - this.exLen = excess; + this.exLen = excess } _digest(rounds: number) { - ensureAvailable(rounds * 8); + ensureAvailable(rounds * 8) wasm.digest( rounds, this.adr, @@ -113,22 +113,22 @@ class XxhashInner { this.exLoc, this.exLoc + this.exLen, memI, - ); + ) } digestInto(rounds: number, digest: Uint8Array) { - this._digest(rounds); - digest.set(memBuf.subarray(memI, memI + rounds * 8)); + this._digest(rounds) + digest.set(memBuf.subarray(memI, memI + rounds * 8)) } digest(rounds: number) { - this._digest(rounds); - return memBuf.slice(memI, memI + rounds * 8); + this._digest(rounds) + return memBuf.slice(memI, memI + rounds * 8) } } function ensureAvailable(length: number) { - if (memI + length <= memBuf.length) return; - memory.grow(Math.ceil((memI + length - memBuf.length) / 65536)); - memBuf = new Uint8Array(memory.buffer); + if (memI + length <= memBuf.length) return + memory.grow(Math.ceil((memI + length - memBuf.length) / 65536)) + memBuf = new Uint8Array(memory.buffer) } diff --git a/hashers/xxhash.wasm.ts b/hashers/xxhash.wasm.ts index 92fd65398..b3ae50647 100644 --- a/hashers/xxhash.wasm.ts +++ b/hashers/xxhash.wasm.ts @@ -1,6 +1,6 @@ // @generated -import { Hex, hex } from "../util/mod.ts"; +import { Hex, hex } from "../util/mod.ts" export default hex.decode( "\ @@ -38,4 +38,4 @@ 
d50110012303fdd50121022001fd1d008523027e23077c2001fd1d018523027e\ 208885210820072008370300200741086a2107200641016a22062000470d000b\ 0b\ " as Hex, -); +) diff --git a/mod.ts b/mod.ts index b0ca9e7a0..5667b3187 100644 --- a/mod.ts +++ b/mod.ts @@ -1,9 +1,9 @@ -export * as compat from "./compat/mod.ts"; -export * as $ from "./deps/scale.ts"; -export { BitSequence } from "./deps/scale.ts"; -export * as Z from "./deps/zones.ts"; -export * from "./effects/mod.ts"; -export * as M from "./frame_metadata/mod.ts"; +export * as compat from "./compat/mod.ts" +export * as $ from "./deps/scale.ts" +export { BitSequence } from "./deps/scale.ts" +export * as Z from "./deps/zones.ts" +export * from "./effects/mod.ts" +export * as M from "./frame_metadata/mod.ts" export { $era, $null, @@ -11,6 +11,6 @@ export { type Era, MultiAddress, type Signer, -} from "./frame_metadata/mod.ts"; -export * as rpc from "./rpc/mod.ts"; -export { contramapListener, hex, type Listener } from "./util/mod.ts"; +} from "./frame_metadata/mod.ts" +export * as rpc from "./rpc/mod.ts" +export { contramapListener, hex, type Listener } from "./util/mod.ts" diff --git a/rpc/client.test.ts b/rpc/client.test.ts index 03f6dc7ef..ec42128df 100644 --- a/rpc/client.test.ts +++ b/rpc/client.test.ts @@ -1,13 +1,13 @@ -import * as A from "../deps/std/testing/asserts.ts"; -import * as T from "../test_util/mod.ts"; -import * as U from "../util/mod.ts"; -import * as known from "./known/mod.ts"; -import * as msg from "./messages.ts"; +import * as A from "../deps/std/testing/asserts.ts" +import * as T from "../test_util/mod.ts" +import * as U from "../util/mod.ts" +import * as known from "./known/mod.ts" +import * as msg from "./messages.ts" Deno.test({ name: "RPC Client", async fn(t) { - const client = await T.polkadot.client; + const client = await T.polkadot.client await t.step({ name: "call", @@ -19,20 +19,20 @@ Deno.test({ id: client.providerRef.nextId(), method: "state_getMetadata", params: [], - }); - A.assertNotInstanceOf(metadata, Error); - A.assert(!metadata.error); - A.assertExists(metadata.result); + }) + A.assertNotInstanceOf(metadata, Error) + A.assert(!metadata.error) + A.assertExists(metadata.result) }, - }); + }) await t.step({ name: "subscribe", sanitizeOps: false, sanitizeResources: false, async fn() { - let subscriptionId: string; - const events: msg.NotificationMessage<"chain_subscribeAllHeads", known.Header>[] = []; + let subscriptionId: string + const events: msg.NotificationMessage<"chain_subscribeAllHeads", known.Header>[] = [] const stoppedSubscriptionId = await client.subscribe< "chain_subscribeAllHeads", known.Header @@ -42,23 +42,23 @@ Deno.test({ method: "chain_subscribeAllHeads", params: [], }, function(event) { - const counter = this.state(U.Counter); - A.assertNotInstanceOf(event, Error); - A.assert(!event.error); - A.assertExists(event.params.result.parentHash); - subscriptionId = event.params.subscription; - events.push(event); + const counter = this.state(U.Counter) + A.assertNotInstanceOf(event, Error) + A.assert(!event.error) + A.assertExists(event.params.result.parentHash) + subscriptionId = event.params.subscription + events.push(event) if (counter.i === 2) { - this.stop(); - return; + this.stop() + return } - counter.inc(); - }); - A.assertEquals(events.length, 3); - A.assertEquals(stoppedSubscriptionId, subscriptionId!); + counter.inc() + }) + A.assertEquals(events.length, 3) + A.assertEquals(stoppedSubscriptionId, subscriptionId!) 
}, - }); + }) - await client.discard(); + await client.discard() }, -}); +}) diff --git a/rpc/client.ts b/rpc/client.ts index 84acdf0c3..8ff0b845e 100644 --- a/rpc/client.ts +++ b/rpc/client.ts @@ -1,13 +1,13 @@ -import { Deferred, deferred } from "../deps/std/async.ts"; -import { getOrInit } from "../util/mod.ts"; -import * as U from "../util/mod.ts"; -import * as msg from "./messages.ts"; -import { Provider, ProviderListener } from "./provider/base.ts"; -import { ProviderHandlerError, ProviderSendError } from "./provider/errors.ts"; +import { Deferred, deferred } from "../deps/std/async.ts" +import { getOrInit } from "../util/mod.ts" +import * as U from "../util/mod.ts" +import * as msg from "./messages.ts" +import { Provider, ProviderListener } from "./provider/base.ts" +import { ProviderHandlerError, ProviderSendError } from "./provider/errors.ts" // TODO: delete this upon solving inner-type-access problem of RPC effects -export declare const ClientE_: unique symbol; -export type ClientE_ = typeof ClientE_; +export declare const ClientE_: unique symbol +export type ClientE_ = typeof ClientE_ export class Client< DiscoveryValue = any, @@ -17,80 +17,80 @@ export class Client< > { // TODO: delete this as well pending the above `TODO` ^ declare [ClientE_]: { - send: SendErrorData; - handler: HandlerErrorData; - close: CloseErrorData; - }; + send: SendErrorData + handler: HandlerErrorData + close: CloseErrorData + } - providerRef; - pendingCalls: Record> = {}; - pendingSubscriptions: SubscriptionListeners = {}; - activeSubscriptions: SubscriptionListeners = {}; - activeSubscriptionByMessageId: Record = {}; - subscriptionStates = new Map any, any>>(); + providerRef + pendingCalls: Record> = {} + pendingSubscriptions: SubscriptionListeners = {} + activeSubscriptions: SubscriptionListeners = {} + activeSubscriptionByMessageId: Record = {} + subscriptionStates = new Map any, any>>() constructor( readonly provider: Provider, readonly discoveryValue: DiscoveryValue, ) { - this.providerRef = provider(discoveryValue, this.#listener); + this.providerRef = provider(discoveryValue, this.#listener) } #listener: ProviderListener = (e) => { if (e instanceof ProviderSendError) { - const egressMessageId = e.egressMessage.id; - const pendingCall = this.pendingCalls[egressMessageId]; - pendingCall?.resolve(e); - delete this.pendingCalls[egressMessageId]; + const egressMessageId = e.egressMessage.id + const pendingCall = this.pendingCalls[egressMessageId] + pendingCall?.resolve(e) + delete this.pendingCalls[egressMessageId] } else if (e instanceof Error) { for (const id in this.pendingCalls) { - const pendingCall = this.pendingCalls[id]!; - pendingCall.resolve(e); - delete this.pendingCalls[id]; - this.pendingSubscriptions[id]!(e); - delete this.pendingSubscriptions[id]; + const pendingCall = this.pendingCalls[id]! 
+ pendingCall.resolve(e) + delete this.pendingCalls[id] + this.pendingSubscriptions[id]!(e) + delete this.pendingSubscriptions[id] } for (const id in this.activeSubscriptions) { - this.activeSubscriptions[id]!(e); - delete this.activeSubscriptions[id]; - this.subscriptionStates.delete(id); + this.activeSubscriptions[id]!(e) + delete this.activeSubscriptions[id] + this.subscriptionStates.delete(id) } } else if (e.id) { - const pendingCall = this.pendingCalls[e.id]; - pendingCall?.resolve(e); - delete this.pendingCalls[e.id]; + const pendingCall = this.pendingCalls[e.id] + pendingCall?.resolve(e) + delete this.pendingCalls[e.id] if (this.pendingSubscriptions[e.id]) { if (e.error) { - this.pendingSubscriptions[e.id]!(e); + this.pendingSubscriptions[e.id]!(e) } else { - this.activeSubscriptions[e.result] = this.pendingSubscriptions[e.id]!; - this.activeSubscriptionByMessageId[e.id] = e.result; + this.activeSubscriptions[e.result] = this.pendingSubscriptions[e.id]! + this.activeSubscriptionByMessageId[e.id] = e.result } - delete this.pendingSubscriptions[e.id]; + delete this.pendingSubscriptions[e.id] } } else if (e.params) { - this.activeSubscriptions[e.params.subscription]?.(e); + this.activeSubscriptions[e.params.subscription]?.(e) } - }; + } call: ClientCall = (message) => { - const waiter = deferred>(); - this.pendingCalls[message.id] = waiter; - this.providerRef.send(message); - return waiter; - }; + const waiter = deferred>() + this.pendingCalls[message.id] = waiter + this.providerRef.send(message) + return waiter + } subscribe: ClientSubscribe = (message, listener) => { - const waiter = deferred(); + const waiter = deferred() const stop = () => { - delete this.pendingSubscriptions[message.id]; - const activeSubscriptionId = this.activeSubscriptionByMessageId[message.id]; + delete this.pendingSubscriptions[message.id] + const activeSubscriptionId = this.activeSubscriptionByMessageId[message.id] if (activeSubscriptionId) { - delete this.activeSubscriptions[activeSubscriptionId]; + delete this.activeSubscriptions[activeSubscriptionId] } - delete this.activeSubscriptionByMessageId[message.id]; - waiter.resolve(activeSubscriptionId); - }; + delete this.activeSubscriptionByMessageId[message.id] + waiter.resolve(activeSubscriptionId) + } const listenerBound = listener.bind({ message, stop, @@ -99,39 +99,39 @@ export class Client< getOrInit(this.subscriptionStates, message.id, () => new WeakMap()), ctor, () => new ctor(), - ); + ) }, - }) as ClientSubscribeListener; - this.pendingSubscriptions[message.id] = listenerBound; + }) as ClientSubscribeListener + this.pendingSubscriptions[message.id] = listenerBound this.call(message) .then((maybeError) => { if (maybeError instanceof Error) { - listenerBound(maybeError); - stop(); + listenerBound(maybeError) + stop() } - }); - return waiter; - }; + }) + return waiter + } discard = () => { - this.pendingCalls = {}; - this.pendingSubscriptions = {}; - this.activeSubscriptions = {}; - this.activeSubscriptionByMessageId = {}; - this.subscriptionStates.clear(); - return this.providerRef.release(); - }; + this.pendingCalls = {} + this.pendingSubscriptions = {} + this.activeSubscriptions = {} + this.activeSubscriptionByMessageId = {} + this.subscriptionStates.clear() + return this.providerRef.release() + } } export type ClientCallEvent = | msg.OkMessage | msg.ErrorMessage | ProviderSendError - | ProviderHandlerError; + | ProviderHandlerError export type ClientCall = ( message: msg.EgressMessage, -) => Promise>; +) => Promise> export type 
ClientSubscriptionEvent< SendErrorData, @@ -142,12 +142,12 @@ export type ClientSubscriptionEvent< | msg.NotificationMessage | msg.ErrorMessage | ProviderSendError - | ProviderHandlerError; + | ProviderHandlerError type SubscriptionListeners = Record< string, ClientSubscribeListener ->; +> export type ClientSubscribe = < Method extends string = string, @@ -161,7 +161,7 @@ export type ClientSubscribe = < Method, Result >, -) => Promise; +) => Promise export type ClientSubscribeListener< SendErrorData, @@ -172,10 +172,10 @@ export type ClientSubscribeListener< > = U.Listener< ClientSubscriptionEvent, Context ->; +> export interface ClientSubscribeContext { - message: msg.EgressMessage; - stop: () => void; - state: (ctor: new() => T) => T; + message: msg.EgressMessage + stop: () => void + state: (ctor: new() => T) => T } diff --git a/rpc/known/author.ts b/rpc/known/author.ts index 37b73e855..5483b2e97 100644 --- a/rpc/known/author.ts +++ b/rpc/known/author.ts @@ -1,4 +1,4 @@ -import { Hash, Hex, RpcResult, SerdeEnum, Subscription } from "./utils.ts"; +import { Hash, Hex, RpcResult, SerdeEnum, Subscription } from "./utils.ts" // https://github.com/paritytech/substrate/blob/e0ccd00/client/transaction-pool/api/src/lib.rs#L104 /** @@ -51,32 +51,32 @@ import { Hash, Hex, RpcResult, SerdeEnum, Subscription } from "./utils.ts"; */ export type TransactionStatus = SerdeEnum<{ /** Transaction is part of the future queue. */ - future: void; + future: void /** Transaction is part of the ready queue. */ - ready: void; + ready: void /** The transaction has been broadcast to the given peers. */ - broadcast: string[]; + broadcast: string[] /** Transaction has been included in block with given hash. */ - inBlock: Hash; + inBlock: Hash /** The block this transaction was included in has been retracted. */ - retracted: Hash; + retracted: Hash /** * Maximum number of finality watchers has been reached, * old watchers are being removed. */ - finalityTimeout: Hash; + finalityTimeout: Hash /** Transaction has been finalized by a finality-gadget, e.g GRANDPA */ - finalized: Hash; + finalized: Hash /** * Transaction has been replaced in the pool, by another transaction * that provides the same tags. (e.g. same (sender, nonce)). */ - usurped: Hash; + usurped: Hash /** Transaction has been dropped from the pool because of the limit. */ - dropped: void; + dropped: void /** Transaction is no longer valid in the current state. */ - invalid: void; -}>; + invalid: void +}> export namespace TransactionStatus { // TODO: convert into type guard? @@ -84,7 +84,7 @@ export namespace TransactionStatus { return typeof inQuestion === "string" ? inQuestion === "invalid" || inQuestion === "dropped" : !!(inQuestion.finalized || inQuestion.finalityTimeout || inQuestion.retracted - || inQuestion.usurped); + || inQuestion.usurped) } } @@ -96,34 +96,34 @@ export namespace TransactionStatus { */ export type ExtrinsicOrHash = SerdeEnum<{ /** The hash of the extrinsic. */ - hash: Hash; + hash: Hash /** Raw extrinsic bytes. */ - extrinsic: Hex; -}>; + extrinsic: Hex +}> // https://github.com/paritytech/substrate/blob/e0ccd00/client/rpc-api/src/author/mod.rs#L30 export type AuthorRpc = { /** Submit hex-encoded extrinsic for inclusion in block. */ - author_submitExtrinsic(extrinsic: Hex): RpcResult; + author_submitExtrinsic(extrinsic: Hex): RpcResult /** Insert a key into the keystore. 
*/ - author_insertKey(keyType: string, suri: string, publicKey: Hex): RpcResult; + author_insertKey(keyType: string, suri: string, publicKey: Hex): RpcResult /** Generate new session keys and returns the corresponding public keys. */ - author_rotateKeys(): RpcResult; + author_rotateKeys(): RpcResult /** * Checks if the keystore has private keys for the given session public keys. * `sessionKeys` is the SCALE encoded session keys object from the runtime. * Returns `true` iff all private keys could be found. */ - author_hasSessionKeys(sessionsKeys: Hex): RpcResult; + author_hasSessionKeys(sessionsKeys: Hex): RpcResult /** * Checks if the keystore has private keys for the given public key and key type. * Returns `true` if a private key could be found. */ - author_hasKey(pubKey: Hex, keyType: string): RpcResult; + author_hasKey(pubKey: Hex, keyType: string): RpcResult /** Returns all pending extrinsics, potentially grouped by sender. */ - author_pendingExtrinsics(): RpcResult; + author_pendingExtrinsics(): RpcResult /** Remove given extrinsic from the pool and temporarily ban it to prevent reimporting. */ - author_removeExtrinsic(extrinsics: ExtrinsicOrHash[]): RpcResult; // todo + author_removeExtrinsic(extrinsics: ExtrinsicOrHash[]): RpcResult // todo /** * Submit an extrinsic to watch. * @@ -132,8 +132,8 @@ export type AuthorRpc = { */ author_submitAndWatchExtrinsic( extrinsic: Hex, - ): RpcResult>; + ): RpcResult> author_unwatchExtrinsic( subscription: Subscription<"author_submitAndWatchExtrinsic", TransactionStatus>, - ): RpcResult; -}; + ): RpcResult +} diff --git a/rpc/known/babe.ts b/rpc/known/babe.ts index c2a9153b4..b148a651d 100644 --- a/rpc/known/babe.ts +++ b/rpc/known/babe.ts @@ -1,14 +1,14 @@ -import { AccountId, RpcResult } from "./utils.ts"; +import { AccountId, RpcResult } from "./utils.ts" // https://github.com/paritytech/substrate/blob/9b01569/client/consensus/babe/rpc/src/lib.rs#L154 /** Holds information about the `slot`'s that can be claimed by a given key. */ export interface EpochAuthorship { /** the array of primary slots that can be claimed */ - primary: number[]; + primary: number[] /** the array of secondary slots that can be claimed */ - secondary: number[]; + secondary: number[] /** The array of secondary VRF slots that can be claimed. */ - secondary_vrf: number[]; + secondary_vrf: number[] } // https://github.com/paritytech/substrate/blob/9b01569/client/consensus/babe/rpc/src/lib.rs#L44 @@ -17,5 +17,5 @@ export type BabeRpc = { * Returns data about which slots (primary or secondary) can be claimed in * the current epoch with the keys in the keystore. */ - babe_epochAuthorship(): RpcResult>; -}; + babe_epochAuthorship(): RpcResult> +} diff --git a/rpc/known/beefy.ts b/rpc/known/beefy.ts index bf935b55f..2f1a418d7 100644 --- a/rpc/known/beefy.ts +++ b/rpc/known/beefy.ts @@ -1,11 +1,11 @@ -import { Hash, Hex, RpcResult, Subscription } from "./utils.ts"; +import { Hash, Hex, RpcResult, Subscription } from "./utils.ts" // https://github.com/paritytech/substrate/blob/317808a/client/beefy/rpc/src/lib.rs#L84 export type BeefyRpc = { /** Returns the block most recently finalized by BEEFY, alongside side its justification. */ beefy_subscribeJustifications(): RpcResult< Subscription<"beefy_subscribeJustifications", Hex> - >; + > /** * Returns hash of the latest BEEFY finalized block as seen by this client. * @@ -13,5 +13,5 @@ export type BeefyRpc = { * in the network or if the client is still initializing or syncing with the network. 
* In such case an error would be returned. */ - beefy_getFinalizedHead(): RpcResult; -}; + beefy_getFinalizedHead(): RpcResult +} diff --git a/rpc/known/chain.ts b/rpc/known/chain.ts index 1343d0afb..28ab8b193 100644 --- a/rpc/known/chain.ts +++ b/rpc/known/chain.ts @@ -1,76 +1,76 @@ -import { HexEncoded } from "../../util/branded.ts"; -import { Hash, Hex, ListOrValue, NumberOrHex, RpcResult, Subscription } from "./utils.ts"; +import { HexEncoded } from "../../util/branded.ts" +import { Hash, Hex, ListOrValue, NumberOrHex, RpcResult, Subscription } from "./utils.ts" // https://github.com/paritytech/substrate/blob/0ba251c/primitives/runtime/src/generic/digest.rs /** Generic header digest. */ export interface Digest { /** A list of logs in the digest. */ - logs: Hex[]; + logs: Hex[] } // https://github.com/paritytech/substrate/blob/01a3ad65/primitives/runtime/src/generic/header.rs#L39 /** Abstraction over a block header for a substrate chain. */ export interface Header { /** The parent hash. */ - parentHash: Hash; + parentHash: Hash /** The block number. */ - number: HexEncoded; + number: HexEncoded /** The state trie merkle root */ - stateRoot: Hash; + stateRoot: Hash /** The merkle root of the extrinsics. */ - extrinsicsRoot: Hash; + extrinsicsRoot: Hash /** A chain-specific digest of data useful for light clients or referencing auxiliary data. */ - digest: Digest; + digest: Digest } // https://github.com/paritytech/substrate/blob/ded44948/primitives/runtime/src/generic/block.rs#L126 /** Abstraction over a substrate block and justification. */ export interface SignedBlock { /** Full block. */ - block: Block; + block: Block /** Block justification. */ - justifications?: [number[], number[]][]; + justifications?: [number[], number[]][] } // https://github.com/paritytech/substrate/blob/ded44948/primitives/runtime/src/generic/block.rs#L88 export interface Block { /** The block header. */ - header: Header; + header: Header /** The accompanying extrinsics. */ - extrinsics: Hex[]; + extrinsics: Hex[] } // https://github.com/paritytech/substrate/blob/934fbfd/client/rpc-api/src/chain/mod.rs#L27 export type ChainRpc = { /** Get header. */ - chain_getHeader(hash?: Hash): RpcResult
; + chain_getHeader(hash?: Hash): RpcResult
/** Get header and body of a relay chain block. */ - chain_getBlock(hash?: Hash): RpcResult; + chain_getBlock(hash?: Hash): RpcResult /** * Get hash of the n-th block in the canon chain. * * By default returns latest block hash. */ - chain_getBlockHash(height?: ListOrValue): RpcResult>; - chain_getHead: ChainRpc["chain_getBlockHash"]; + chain_getBlockHash(height?: ListOrValue): RpcResult> + chain_getHead: ChainRpc["chain_getBlockHash"] /** Get hash of the last finalized block in the canon chain. */ - chain_getFinalizedHead(): RpcResult; - chain_getFinalisedHead: ChainRpc["chain_getFinalizedHead"]; + chain_getFinalizedHead(): RpcResult + chain_getFinalisedHead: ChainRpc["chain_getFinalizedHead"] /** All head subscription. */ - chain_subscribeAllHeads(): RpcResult>; + chain_subscribeAllHeads(): RpcResult> chain_unsubscribeAllHeads( subscription: Subscription<"chain_subscribeAllHeads", Header>, - ): RpcResult; + ): RpcResult /** New head subscription. */ - chain_subscribeNewHeads(): RpcResult>; + chain_subscribeNewHeads(): RpcResult> chain_unsubscribeNewHeads( subscription: Subscription<"chain_subscribeAllHeads", Header>, - ): RpcResult; + ): RpcResult /** Finalized head subscription. */ - chain_subscribeFinalizedHeads(): RpcResult>; + chain_subscribeFinalizedHeads(): RpcResult> chain_unsubscribeFinalizedHeads( subscription: Subscription<"chain_subscribeAllHeads", Header>, - ): RpcResult; - chain_subscribeFinalisedHeads: ChainRpc["chain_subscribeFinalizedHeads"]; - chain_unsubscribeFinalisedHeads: ChainRpc["chain_unsubscribeFinalizedHeads"]; -}; + ): RpcResult + chain_subscribeFinalisedHeads: ChainRpc["chain_subscribeFinalizedHeads"] + chain_unsubscribeFinalisedHeads: ChainRpc["chain_unsubscribeFinalizedHeads"] +} diff --git a/rpc/known/childstate.ts b/rpc/known/childstate.ts index 75e025623..13e6ddc5a 100644 --- a/rpc/known/childstate.ts +++ b/rpc/known/childstate.ts @@ -1,16 +1,16 @@ -import { Hash, Hex, RpcResult } from "./utils.ts"; +import { Hash, Hex, RpcResult } from "./utils.ts" // https://github.com/paritytech/substrate/blob/4d04aba/primitives/storage/src/lib.rs -export type StorageKey = Hex; -export type PrefixedStorageKey = Hex; -export type StorageData = Hex; +export type StorageKey = Hex +export type PrefixedStorageKey = Hex +export type StorageData = Hex // https://github.com/paritytech/substrate/blob/ded44948/client/rpc-api/src/state/helpers.rs#L27 export interface ReadProof { /** Block hash used to generate the proof */ - at: Hash; + at: Hash /** A proof used to prove that storage entries are included in the storage trie */ - proof: Hex[]; + proof: Hex[] } // https://github.com/paritytech/substrate/blob/934fbfd/client/rpc-api/src/child_state/mod.rs#L29 @@ -23,7 +23,7 @@ export type ChildStateRpc = { childStorageKey: PrefixedStorageKey, prefix: StorageKey, hash?: Hash, - ): RpcResult; + ): RpcResult /** * Returns the keys with prefix from a child storage with pagination support. * Up to `count` keys will be returned. @@ -35,35 +35,35 @@ export type ChildStateRpc = { count: number, startKey?: StorageKey, hash?: Hash, - ): RpcResult; + ): RpcResult /** Returns a child storage entry at a specific block's state. */ childState_getStorage( childStorageKey: PrefixedStorageKey, key: StorageKey, hash?: Hash, - ): RpcResult; + ): RpcResult /** Returns child storage entries for multiple keys at a specific block's state. 
*/ childState_getStorageEntries( childStorageKey: PrefixedStorageKey, keys: StorageKey[], hash?: Hash, - ): RpcResult<(StorageData | null)[]>; + ): RpcResult<(StorageData | null)[]> /** Returns the hash of a child storage entry at a block's state. */ childState_getStorageHash( childStorageKey: PrefixedStorageKey, key: StorageKey, hash?: Hash, - ): RpcResult; + ): RpcResult /** Returns the size of a child storage entry at a block's state. */ childState_getStorageSize( childStorageKey: PrefixedStorageKey, key: StorageKey, hash?: Hash, - ): RpcResult; + ): RpcResult /** Returns proof of storage for child key entries at a specific block's state. */ state_getChildReadProof( childStorageKey: PrefixedStorageKey, keys: StorageKey[], hash?: Hash, - ): RpcResult; -}; + ): RpcResult +} diff --git a/rpc/known/contracts.ts b/rpc/known/contracts.ts index b6a8eb62c..608fab5bc 100644 --- a/rpc/known/contracts.ts +++ b/rpc/known/contracts.ts @@ -1,14 +1,14 @@ -import { AccountId, Hash, Hex, NumberOrHex, RpcResult, SerdeEnum, SerdeResult } from "./utils.ts"; +import { AccountId, Hash, Hex, NumberOrHex, RpcResult, SerdeEnum, SerdeResult } from "./utils.ts" // https://github.com/paritytech/substrate/blob/0246883/frame/contracts/rpc/src/lib.rs#L92 /** A struct that encodes RPC parameters required for a call to a smart-contract. */ export interface CallRequest { - origin: AccountId; - dest: AccountId; - value: NumberOrHex; - gasLimit: NumberOrHex; - storageDepositLimit: NumberOrHex | undefined; - inputData: Hex; + origin: AccountId + dest: AccountId + value: NumberOrHex + gasLimit: NumberOrHex + storageDepositLimit: NumberOrHex | undefined + inputData: Hex } // https://github.com/paritytech/substrate/blob/622f532/frame/contracts/common/src/lib.rs#L50 @@ -19,7 +19,7 @@ export interface CallRequest { */ export interface ContractResult { /** How much gas was consumed during execution. */ - gasConsumed: number; + gasConsumed: number /** * How much gas is required as gas limit in order to execute this call. * @@ -32,14 +32,14 @@ export interface ContractResult { * Additionally, any `seal_call` or `seal_instantiate` makes use of pre-charging * when a non-zero `gas_limit` argument is supplied. */ - gasRequired: number; + gasRequired: number /** * How much balance was deposited and reserved during execution in order to pay for storage. * * The storage deposit is never actually charged from the caller in case of [`Self::result`] * is `Err`. This is because on error all storage changes are rolled back. */ - storageDeposit: StorageDeposit; + storageDeposit: StorageDeposit /** * An optional debug message. This message is only filled when explicitly requested * by the code that calls into the contract. Otherwise it is empty. @@ -56,9 +56,9 @@ export interface ContractResult { * The debug message is never generated during on-chain execution. It is reserved for * RPC calls. */ - debugMessage: string; + debugMessage: string /** The execution result of the wasm code. */ - result: R; + result: R } // https://github.com/paritytech/substrate/blob/622f532/frame/contracts/common/src/lib.rs#L200 @@ -70,15 +70,15 @@ export type StorageDeposit = SerdeEnum<{ * This means that the specified amount of balance was transferred from the involved * contracts to the call origin. */ - Refund: NumberOrHex; + Refund: NumberOrHex /** * The transaction increased overall storage usage. * * This means that the specified amount of balance was transferred from the call origin * to the contracts involved. 
*/ - Charge: NumberOrHex; -}>; + Charge: NumberOrHex +}> // https://github.com/paritytech/substrate/blob/622f532/frame/contracts/common/src/lib.rs#L118 /** Flags used by a contract to customize exit behavior. */ @@ -89,134 +89,134 @@ export enum ReturnFlags { /** Output of a contract call or instantiation which ran to completion. */ export interface ExecReturnValue { /** Flags passed along by `seal_return`. Empty when `seal_return` was never called. */ - flags: ReturnFlags; + flags: ReturnFlags /** Buffer passed along by `seal_return`. Empty when `seal_return` was never called. */ - data: Hex; + data: Hex } // https://github.com/paritytech/substrate/blob/dc22e48/primitives/runtime/src/lib.rs#L524 /** Reason why a dispatch call failed. */ export type DispatchError = SerdeEnum<{ /** Some error occurred. */ - Other: string; + Other: string /** Failed to lookup some data. */ - CannotLookup: void; + CannotLookup: void /** A bad origin. */ - BadOrigin: void; + BadOrigin: void /** A custom error in a module. */ - Module: ModuleError; + Module: ModuleError /** At least one consumer is remaining so the account cannot be destroyed. */ - ConsumerRemaining: void; + ConsumerRemaining: void /** There are no providers so the account cannot be created. */ - NoProviders: void; + NoProviders: void /** There are too many consumers so the account cannot be created. */ - TooManyConsumers: void; + TooManyConsumers: void /** An error to do with tokens. */ - Token: TokenError; + Token: TokenError /** An arithmetic error. */ - Arithmetic: ArithmeticError; + Arithmetic: ArithmeticError /** * The number of transactional layers has been reached, or we are not in a transactional * layer. */ - Transactional: TransactionalError; -}>; + Transactional: TransactionalError +}> // https://github.com/paritytech/substrate/blob/dc22e48/primitives/runtime/src/lib.rs#L479 /** Reason why a pallet call failed. */ export type ModuleError = { /** Module index, matching the metadata module index. */ - index: number; + index: number /** Module specific error value. */ - error: number; + error: number /** Optional error message. */ - message: string | undefined; -}; + message: string | undefined +} // https://github.com/paritytech/substrate/blob/dc22e48/primitives/runtime/src/lib.rs#L641 /** Arithmetic errors. */ export type ArithmeticError = SerdeEnum<{ /** Underflow. */ - Underflow: void; + Underflow: void /** Overflow. */ - Overflow: void; + Overflow: void /** Division by zero. */ - DivisionByZero: void; -}>; + DivisionByZero: void +}> // https://github.com/paritytech/substrate/blob/dc22e48/primitives/runtime/src/lib.rs#L601 /** Description of what went wrong when trying to complete an operation on a token. */ export type TokenError = SerdeEnum<{ /** Funds are unavailable. */ - NoFunds: void; + NoFunds: void /** Account that must exist would die. */ - WouldDie: void; + WouldDie: void /** Account cannot exist with the funds that would be given. */ - BelowMinimum: void; + BelowMinimum: void /** Account cannot be created. */ - CannotCreate: void; + CannotCreate: void /** The asset in question is unknown. */ - UnknownAsset: void; + UnknownAsset: void /** Funds exist but are frozen. */ - Frozen: void; + Frozen: void /** Operation is not supported by the asset. */ - Unsupported: void; -}>; + Unsupported: void +}> // https://github.com/paritytech/substrate/blob/dc22e48/primitives/runtime/src/lib.rs#L499 /** Errors related to transactional storage layers. 
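The `SerdeEnum` helper these error types are built on (see `rpc/known/utils.ts` further down) flattens unit variants to the bare variant name and wraps payload variants in a single-key object. A rough sketch of consuming a `DispatchError` under that convention, using simplified local copies of the types; nothing here is part of the module itself:

```ts
// Simplified local copies of the shapes above, for illustration only.
type SerdeEnumLike<T> = { [K in keyof T]: T[K] extends void ? K : Pick<T, K> }[keyof T]
type ModuleErrorLike = { index: number; error: number; message: string | undefined }
type TokenErrorLike = SerdeEnumLike<{ NoFunds: void; Frozen: void }>
type DispatchErrorLike = SerdeEnumLike<{
  Other: string
  CannotLookup: void
  Module: ModuleErrorLike
  Token: TokenErrorLike
  // ...remaining variants elided...
}>

// Unit variants arrive as the bare variant name; payload variants as a single-key object.
function describeDispatchError(e: DispatchErrorLike): string {
  if (typeof e === "string") return e // e.g. "CannotLookup"
  if ("Other" in e) return `Other: ${e.Other}`
  if ("Module" in e) return `Module ${e.Module.index}, error ${e.Module.error}`
  if ("Token" in e) return `Token: ${e.Token}`
  return JSON.stringify(e)
}
```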
*/ export type TransactionalError = SerdeEnum<{ /** Too many transactional layers have been spawned. */ - LimitReached: void; + LimitReached: void /** A transactional layer was expected, but does not exist. */ - NoLayer: void; -}>; + NoLayer: void +}> // https://github.com/paritytech/substrate/blob/622f532/frame/contracts/common/src/lib.rs#L176 /** Reference to an existing code hash or a new wasm module */ export type Code = SerdeEnum<{ /** A wasm module as raw bytes. */ - upload: Hex; + upload: Hex /** The code hash of an on-chain wasm blob. */ - existing: Hash; -}>; + existing: Hash +}> // https://github.com/paritytech/substrate/blob/0246883/frame/contracts/rpc/src/lib.rs#L105 /** A struct that encodes RPC parameters required to instantiate a new smart-contract. */ export interface InstantiateRequest { - origin: AccountId; - value: NumberOrHex; - gasLimit: NumberOrHex; - storageDepositLimit: NumberOrHex | undefined; - code: Code; - data: Hex; - salt: Hex; + origin: AccountId + value: NumberOrHex + gasLimit: NumberOrHex + storageDepositLimit: NumberOrHex | undefined + code: Code + data: Hex + salt: Hex } // https://github.com/paritytech/substrate/blob/0246883/frame/contracts/rpc/src/lib.rs#L119 /** A struct that encodes RPC parameters required for a call to upload a new code. */ export interface CodeUploadRequest { - origin: AccountId; - code: Hex; - storageDepositLimit: NumberOrHex | undefined; + origin: AccountId + code: Hex + storageDepositLimit: NumberOrHex | undefined } // https://github.com/paritytech/substrate/blob/622f532/frame/contracts/common/src/lib.rs#L164 /** The result of successfully uploading a contract. */ export interface CodeUploadReturnValue { /** The key under which the new code is stored. */ - codeHash: Hash; + codeHash: Hash /** The deposit that was reserved at the caller. Is zero when the code already existed. */ - deposit: NumberOrHex; + deposit: NumberOrHex } // https://github.com/paritytech/substrate/blob/622f532/frame/contracts/common/src/lib.rs#L146 /** The result of a successful contract instantiation. */ export interface InstantiateReturnValue { /** The output of the called constructor. */ - result: ExecReturnValue; + result: ExecReturnValue /** The account id of the new contract. */ - account_id: AccountId; + account_id: AccountId } // https://github.com/paritytech/substrate/blob/0246883/frame/contracts/rpc/src/lib.rs#L127 @@ -233,7 +233,7 @@ export type ContractsRpc = { contracts_call( callRequest: CallRequest, at?: Hash, - ): RpcResult>>; + ): RpcResult>> /** * Instantiate a new contract. * @@ -244,7 +244,7 @@ export type ContractsRpc = { */ contracts_instantiate( instantiateRequest: InstantiateRequest, - ): RpcResult>>; + ): RpcResult>> /** * Upload new code without instantiating a contract from it. * @@ -256,7 +256,7 @@ export type ContractsRpc = { contracts_upload_code( uploadRequest: CodeUploadRequest, at?: Hash, - ): RpcResult>; + ): RpcResult> /** * Returns the value under a specified storage `key` in a contract given by `address` param, * or `None` if it is not set. 
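Tying the `ContractsRpc` pieces above together, here is a rough gas-estimation sketch built from `CallRequest` and `ContractResult`. The `rpcCall` helper, the `*Like` mirror types, and the literal gas limit are all illustrative assumptions, not part of this module:

```ts
// Simplified local mirrors of the request/response shapes above; illustration only.
type AccountId = string
type Hex = string
type NumberOrHex = string | number
interface CallRequestLike {
  origin: AccountId
  dest: AccountId
  value: NumberOrHex
  gasLimit: NumberOrHex
  storageDepositLimit: NumberOrHex | undefined
  inputData: Hex
}
interface ContractResultLike {
  gasConsumed: number
  gasRequired: number
  debugMessage: string
  result: unknown
}

// Hypothetical transport helper; the real client wiring lives elsewhere in the repo.
declare function rpcCall<R>(method: string, params: unknown[]): Promise<R>

// Dry-run a contract call and read back `gasRequired`, per the field docs above.
async function estimateGas(origin: AccountId, dest: AccountId, inputData: Hex) {
  const request: CallRequestLike = {
    origin,
    dest,
    value: 0,
    gasLimit: "0x10000000000", // generous limit for the dry run (illustrative value)
    storageDepositLimit: undefined,
    inputData,
  }
  const outcome = await rpcCall<ContractResultLike>("contracts_call", [request])
  if (outcome.debugMessage) console.log("contract debug output:", outcome.debugMessage)
  return outcome.gasRequired
}
```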
@@ -265,5 +265,5 @@ export type ContractsRpc = { accountId: AccountId, key: Hex, aat?: Hash, - ): RpcResult; -}; + ): RpcResult +} diff --git a/rpc/known/framesystem.ts b/rpc/known/framesystem.ts index 8a8a7fbeb..b9e1d8079 100644 --- a/rpc/known/framesystem.ts +++ b/rpc/known/framesystem.ts @@ -1,4 +1,4 @@ -import { AccountId, Hash, Hex, RpcResult } from "./utils.ts"; +import { AccountId, Hash, Hex, RpcResult } from "./utils.ts" // https://github.com/paritytech/substrate/blob/eddf888/utils/frame/rpc/system/src/lib.rs#L41 export type FrameSystemRpc = { @@ -9,9 +9,9 @@ export type FrameSystemRpc = { * currently in the pool and if no transactions are found in the pool * it fallbacks to query the index from the runtime (aka. state nonce). */ - system_accountNextIndex(account: AccountId): RpcResult; - account_nextIndex: FrameSystemRpc["system_accountNextIndex"]; + system_accountNextIndex(account: AccountId): RpcResult + account_nextIndex: FrameSystemRpc["system_accountNextIndex"] /** Dry run an extrinsic at a given block. Return SCALE encoded ApplyExtrinsicResult. */ - system_dryRun(extrinsic: Hex, at?: Hash): RpcResult; - system_dryRunAt: FrameSystemRpc["system_dryRun"]; -}; + system_dryRun(extrinsic: Hex, at?: Hash): RpcResult + system_dryRunAt: FrameSystemRpc["system_dryRun"] +} diff --git a/rpc/known/grandpa.ts b/rpc/known/grandpa.ts index 8bd36a368..92e751f36 100644 --- a/rpc/known/grandpa.ts +++ b/rpc/known/grandpa.ts @@ -1,4 +1,4 @@ -import { Hex, RpcResult, Subscription } from "./utils.ts"; +import { Hex, RpcResult, Subscription } from "./utils.ts" // https://github.com/paritytech/substrate/blob/0ba251c/client/finality-grandpa/rpc/src/report.rs#L116 /** @@ -6,38 +6,38 @@ import { Hex, RpcResult, Subscription } from "./utils.ts"; * form suitable for serialization. */ export interface ReportedRoundStates { - setId: number; - best: RoundState; - background: RoundState[]; + setId: number + best: RoundState + background: RoundState[] } // https://github.com/paritytech/substrate/blob/0ba251c/client/finality-grandpa/rpc/src/report.rs#L76 export interface RoundState { - round: number; - totalWeight: number; - thresholdWeight: number; - prevotes: Prevotes; - precommits: Precommits; + round: number + totalWeight: number + thresholdWeight: number + prevotes: Prevotes + precommits: Precommits } // https://github.com/paritytech/substrate/blob/0ba251c/client/finality-grandpa/rpc/src/report.rs#L62 export interface Prevotes { - currentWeight: number; - missing: Hex[]; + currentWeight: number + missing: Hex[] } // https://github.com/paritytech/substrate/blob/0ba251c/client/finality-grandpa/rpc/src/report.rs#L69 export interface Precommits { - currentWeight: number; - missing: Hex[]; + currentWeight: number + missing: Hex[] } // https://github.com/paritytech/substrate/blob/ded44948/client/finality-grandpa/rpc/src/notification.rs /** An encoded justification proving that the given header has been finalized */ -export type JustificationNotification = Hex; +export type JustificationNotification = Hex // https://github.com/paritytech/substrate/blob/ded44948/client/finality-grandpa/rpc/src/finality.rs -export type EncodedFinalityProof = Hex; +export type EncodedFinalityProof = Hex // https://github.com/paritytech/substrate/blob/9b01569/client/finality-grandpa/rpc/src/lib.rs#L48 export type GrandpaRpc = { @@ -45,20 +45,20 @@ export type GrandpaRpc = { * Returns the state of the current best round state as well as the * ongoing background rounds. 
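For `FrameSystemRpc`, the usual reason to call `system_accountNextIndex` is to pick the nonce for an account's next extrinsic. A minimal sketch, assuming a hypothetical `rpcCall` helper and that the result deserializes as a plain number:

```ts
// Hypothetical transport helper plus a simplified alias; illustrative only.
type AccountId = string
declare function rpcCall<R>(method: string, params: unknown[]): Promise<R>

// Pick the nonce for the next extrinsic: per the docs above, this already accounts
// for transactions that are still sitting in the pool.
async function nextNonce(account: AccountId): Promise<number> {
  return rpcCall<number>("system_accountNextIndex", [account])
}
```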
*/ - grandpa_roundState(): RpcResult; + grandpa_roundState(): RpcResult /** * Returns the block most recently finalized by Grandpa, alongside * side its justification. */ grandpa_subscribeJustifications(): RpcResult< Subscription<"grandpa_subscribeJustifications", JustificationNotification> - >; + > grandpa_unsubscribeJustifications( subscription: Subscription<"grandpa_subscribeJustifications", JustificationNotification>, - ): void; + ): void /** * Prove finality for the given block number by returning the Justification for the last block * in the set and all the intermediary headers to link them together. */ - grandpa_proveFinality(block: number): RpcResult; -}; + grandpa_proveFinality(block: number): RpcResult +} diff --git a/rpc/known/mmr.ts b/rpc/known/mmr.ts index 245b0684d..f42bb62b4 100644 --- a/rpc/known/mmr.ts +++ b/rpc/known/mmr.ts @@ -1,4 +1,4 @@ -import { Hash, Hex, RpcResult } from "./utils.ts"; +import { Hash, Hex, RpcResult } from "./utils.ts" // https://github.com/paritytech/substrate/blob/6c5ac31/primitives/merkle-mountain-range/src/lib.rs#L37 /** @@ -8,28 +8,28 @@ import { Hash, Hex, RpcResult } from "./utils.ts"; * both leafs and inner nodes. Leafs will always have consecutive `LeafIndex`, * but might be actually at different positions in the MMR `NodeIndex`. */ -export type LeafIndex = number; +export type LeafIndex = number // https://github.com/paritytech/substrate/blob/eddf888/frame/merkle-mountain-range/rpc/src/lib.rs#L49 /** Retrieved MMR leaf and its proof. */ export interface LeafProof { /** Block hash the proof was generated for. */ - blockHash: Hash; + blockHash: Hash /** SCALE-encoded leaf data. */ - leaf: Hex; + leaf: Hex /** SCALE-encoded proof data. See [sp_mmr_primitives::Proof]. */ - proof: Hex; + proof: Hex } // https://github.com/paritytech/substrate/blob/eddf888/frame/merkle-mountain-range/rpc/src/lib.rs#L72 /** Retrieved MMR leaves and their proof. */ export interface LeafBatchProof { /** Block hash the proof was generated for. */ - blockHash: Hash; + blockHash: Hash /** SCALE-encoded vector of `LeafData`. */ - leaves: Hex; + leaves: Hex /** SCALE-encoded proof data. See [sp_mmr_primitives::Proof]. */ - proof: Hex; + proof: Hex } // https://github.com/paritytech/substrate/blob/eddf888/frame/merkle-mountain-range/rpc/src/lib.rs#L99 @@ -44,7 +44,7 @@ export type MmrRpc = { * Returns the (full) leaf itself and a proof for this leaf (compact encoding, i.e. hash of * the leaf). Both parameters are SCALE-encoded. */ - mmr_generateProof(leafIndex: LeafIndex, at?: Hash): RpcResult; + mmr_generateProof(leafIndex: LeafIndex, at?: Hash): RpcResult /** * Generate MMR proof for the given leaf indices. 
* @@ -57,5 +57,5 @@ export type MmrRpc = { * The order of entries in the `leaves` field of the returned struct * is the same as the order of the entries in `leaf_indices` supplied */ - mmr_generateBatchProof(leafIndices: LeafIndex[], at?: Hash): RpcResult; -}; + mmr_generateBatchProof(leafIndices: LeafIndex[], at?: Hash): RpcResult +} diff --git a/rpc/known/mod.ts b/rpc/known/mod.ts index 43fe16c6f..35bb925dd 100644 --- a/rpc/known/mod.ts +++ b/rpc/known/mod.ts @@ -1,16 +1,16 @@ -export * from "./author.ts"; -export * from "./babe.ts"; -export * from "./beefy.ts"; -export * from "./chain.ts"; -export * from "./childstate.ts"; -export * from "./contracts.ts"; -export * from "./framesystem.ts"; -export * from "./grandpa.ts"; -export * from "./mmr.ts"; -export * from "./mod.ts"; -export * from "./offchain.ts"; -export * from "./payment.ts"; -export * from "./state.ts"; -export * from "./statemigration.ts"; -export * from "./system.ts"; -export * from "./utils.ts"; +export * from "./author.ts" +export * from "./babe.ts" +export * from "./beefy.ts" +export * from "./chain.ts" +export * from "./childstate.ts" +export * from "./contracts.ts" +export * from "./framesystem.ts" +export * from "./grandpa.ts" +export * from "./mmr.ts" +export * from "./mod.ts" +export * from "./offchain.ts" +export * from "./payment.ts" +export * from "./state.ts" +export * from "./statemigration.ts" +export * from "./system.ts" +export * from "./utils.ts" diff --git a/rpc/known/offchain.ts b/rpc/known/offchain.ts index 110e66159..d53bd3043 100644 --- a/rpc/known/offchain.ts +++ b/rpc/known/offchain.ts @@ -1,4 +1,4 @@ -import { Hex, RpcResult, SerdeEnum } from "./utils.ts"; +import { Hex, RpcResult, SerdeEnum } from "./utils.ts" /** A type of supported crypto. */ export type StorageKind = SerdeEnum<{ @@ -10,20 +10,20 @@ export type StorageKind = SerdeEnum<{ * This storage can be used by offchain workers to handle forks * and coordinate offchain workers running on different forks. */ - PERSISTENT: void; + PERSISTENT: void /** * Local storage is revertible and fork-aware. It means that any value * set by the offchain worker triggered at block `N(hash1)` is reverted * if that block is reverted as non-canonical and is NOT available for the worker * that is re-run at block `N(hash2)`. */ - LOCAL: void; -}>; + LOCAL: void +}> // https://github.com/paritytech/substrate/blob/7d233c2/client/rpc-api/src/offchain/mod.rs#L28 export type OffchainRpc = { /** Set offchain local storage under given key and prefix. */ - offchain_localStorageSet(kind: StorageKind, key: Hex, value: Hex): RpcResult; + offchain_localStorageSet(kind: StorageKind, key: Hex, value: Hex): RpcResult /** Get offchain local storage under given key and prefix. */ - offchain_localStorageGet(kind: StorageKind, key: Hex): RpcResult; -}; + offchain_localStorageGet(kind: StorageKind, key: Hex): RpcResult +} diff --git a/rpc/known/payment.ts b/rpc/known/payment.ts index 753d2e30c..d124f383e 100644 --- a/rpc/known/payment.ts +++ b/rpc/known/payment.ts @@ -1,4 +1,4 @@ -import { Hash, Hex, NumberOrHex, SerdeEnum } from "./utils.ts"; +import { Hash, Hex, NumberOrHex, SerdeEnum } from "./utils.ts" // https://github.com/paritytech/substrate/blob/23bb5a6/frame/transaction-payment/src/types.rs#L99 /** @@ -6,9 +6,9 @@ import { Hash, Hex, NumberOrHex, SerdeEnum } from "./utils.ts"; * runtime. 
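For `OffchainRpc`, a small round-trip sketch through offchain local storage. The `rpcCall` helper is hypothetical, and the getter is assumed to yield the stored hex or `null` when nothing is set:

```ts
// Illustrative sketch: simplified aliases plus a hypothetical transport helper.
type Hex = string
type StorageKind = "PERSISTENT" | "LOCAL" // the two void variants of the SerdeEnum above
declare function rpcCall<R>(method: string, params: unknown[]): Promise<R>

// Round-trip a value through the node's offchain local storage.
async function offchainRoundTrip(key: Hex, value: Hex): Promise<Hex | null> {
  const kind: StorageKind = "PERSISTENT" // not reverted when blocks are, per the docs above
  await rpcCall<null>("offchain_localStorageSet", [kind, key, value])
  return rpcCall<Hex | null>("offchain_localStorageGet", [kind, key])
}
```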
*/ export interface RuntimeDispatchInfo { - weight: number; - class: DispatchClass; - partialFee: number; + weight: number + class: DispatchClass + partialFee: number } // https://github.com/paritytech/substrate/blob/23bb5a6255bbcd7ce2999044710428bc4a7a924f/frame/support/src/dispatch.rs#L140 @@ -20,9 +20,9 @@ export interface RuntimeDispatchInfo { */ export type DispatchClass = SerdeEnum<{ /** A normal dispatch. */ - normal: void; + normal: void /** An operational dispatch. */ - operational: void; + operational: void /** * A mandatory dispatch. These kinds of dispatch are always included regardless of their * weight, therefore it is critical that they are separately validated to ensure that a @@ -38,8 +38,8 @@ export type DispatchClass = SerdeEnum<{ * block. Essentially, we assume that in these exceptional circumstances, it is better to * allow an overweight block to be created than to not allow any block at all to be created. */ - mandatory: void; -}>; + mandatory: void +}> // https://github.com/paritytech/substrate/blob/23bb5a6/frame/transaction-payment/src/types.rs#L69 /** @@ -49,8 +49,8 @@ export type DispatchClass = SerdeEnum<{ * transactions can have a tip. */ export interface FeeDetails { - inclusionFee?: InclusionFee; - tip: NumberOrHex; + inclusionFee?: InclusionFee + tip: NumberOrHex } // https://github.com/paritytech/substrate/blob/23bb5a6/frame/transaction-payment/src/types.rs#L33 @@ -60,9 +60,9 @@ export interface InclusionFee { * This is the minimum amount a user pays for a transaction. It is declared * as a base _weight_ in the runtime and converted to a fee using `WeightToFee`. */ - baseFee: NumberOrHex; + baseFee: NumberOrHex /** The length fee, the amount paid for the encoded length (in bytes) of the transaction. */ - lenFee: NumberOrHex; + lenFee: NumberOrHex /** * - `targeted_fee_adjustment`: This is a multiplier that can tune the final fee based on the * congestion of the network. @@ -71,11 +71,11 @@ export interface InclusionFee { * * adjusted_weight_fee = targeted_fee_adjustment * weight_fee */ - adjustedWeightFee: NumberOrHex; + adjustedWeightFee: NumberOrHex } // https://github.com/paritytech/substrate/blob/eddf888/frame/transaction-payment/rpc/src/lib.rs#L41 export type TransactionPaymentApi = { - payment_queryInfo(extrinsic: Hex, at?: Hash): RuntimeDispatchInfo; - payment_queryFeeDetails(extrinsic: Hex, at?: Hash): FeeDetails; -}; + payment_queryInfo(extrinsic: Hex, at?: Hash): RuntimeDispatchInfo + payment_queryFeeDetails(extrinsic: Hex, at?: Hash): FeeDetails +} diff --git a/rpc/known/state.ts b/rpc/known/state.ts index f9a56807f..408f1e729 100644 --- a/rpc/known/state.ts +++ b/rpc/known/state.ts @@ -1,5 +1,5 @@ -import { ReadProof, StorageData, StorageKey } from "./childstate.ts"; -import { Hash, Hex, RpcResult, SerdeEnum, Subscription } from "./utils.ts"; +import { ReadProof, StorageData, StorageKey } from "./childstate.ts" +import { Hash, Hex, RpcResult, SerdeEnum, Subscription } from "./utils.ts" // https://github.com/paritytech/substrate/blob/01a3ad65/primitives/version/src/lib.rs#L161 /** @@ -16,7 +16,7 @@ export interface RuntimeVersion { * A different on-chain spec_name to that of the native runtime would normally result * in node not attempting to sync or author blocks. */ - specName: string; + specName: string /** * Name of the implementation of the spec. This is of little consequence for the node @@ -25,20 +25,20 @@ export interface RuntimeVersion { * Polkadot runtime (e.g. 
C++), then it would identify itself with an accordingly different * `impl_name`. */ - implName: string; + implName: string /** * `authoring_version` is the version of the authorship interface. An authoring node * will not attempt to author blocks unless this is equal to its native runtime. */ - authoringVersion: number; + authoringVersion: number /** * Version of the runtime specification. A full-node will not attempt to use its native * runtime in substitute for the on-chain Wasm runtime unless all of `spec_name`, * `spec_version` and `authoring_version` are the same between Wasm and native. */ - specVersion: number; + specVersion: number /** * Version of the implementation of the specification. Nodes are free to ignore this; it @@ -48,9 +48,9 @@ export interface RuntimeVersion { * Non-consensus-breaking optimizations are about the only changes that could be made which * would result in only the `impl_version` changing. */ - implVersion: number; + implVersion: number /** List of supported API "features" along with their versions. */ - apis: [Hash, Hex | undefined][]; + apis: [Hash, Hex | undefined][] /** * All existing dispatches are fully compatible when this number doesn't change. If this * number changes, then `spec_version` must change, also. @@ -62,82 +62,82 @@ export interface RuntimeVersion { * * It need *not* change when a new module is added or when a dispatchable is added. */ - transactionVersion: number; + transactionVersion: number /** * Version of the state implementation used by this runtime. * Use of an incorrect version is consensus breaking. */ - stateVersion: number; + stateVersion: number } // https://github.com/paritytech/substrate/blob/4d04aba/primitives/storage/src/lib.rs#L181 /** Storage change set */ export interface StorageChangeSet { /** Block hash */ - block: Hash; + block: Hash /** A list of changes */ - changes: [StorageKey, StorageData | null][]; + changes: [StorageKey, StorageData | null][] } // https://github.com/paritytech/substrate/blob/ded44948/primitives/rpc/src/tracing.rs#L96 /** Response for the `state_traceBlock` RPC. */ export type TraceBlockResponse = SerdeEnum<{ /** Error block tracing response */ - traceError: TraceError; + traceError: TraceError /** Successful block tracing response */ - blockTrace: BlockTrace; -}>; + blockTrace: BlockTrace +}> // https://github.com/paritytech/substrate/blob/ded44948/primitives/rpc/src/tracing.rs#L88 /** Error response for the `state_traceBlock` RPC. */ export interface TraceError { /** Error message */ - error: string; + error: string } // https://github.com/paritytech/substrate/blob/ded44948/primitives/rpc/src/tracing.rs#L27 export interface BlockTrace { /** Hash of the block being traced */ - blockHash: Hash; + blockHash: Hash /** Parent hash */ - parentHash: Hash; + parentHash: Hash /** * Module targets that were recorded by the tracing subscriber. * Empty string means record all targets. */ - tracingTargets: string; + tracingTargets: string /** * Storage key targets used to filter out events that do not have one of the storage keys. * Empty string means do not filter out any events. */ - storage_keys: string; + storage_keys: string /** * Method targets used to filter out events that do not have one of the event method. * Empty string means do not filter out any events. 
*/ - methods: string; + methods: string /** Vec of tracing spans */ - spans: Span[]; + spans: Span[] /** Vec of tracing events */ - events: Event[]; + events: Event[] } // https://github.com/paritytech/substrate/blob/ded44948/primitives/rpc/src/tracing.rs#L50 /** Represents a tracing event, complete with recorded data. */ export interface Event { /** Event target */ - target: string; + target: string /** Associated data */ - data: Data; + data: Data /** Parent id, if it exists */ - parent_id?: number; + parent_id?: number } // https://github.com/paritytech/substrate/blob/ded44948/primitives/rpc/src/tracing.rs#L80 /** Holds associated values for a tracing span. */ export interface Data { /** HashMap of `String` values recorded while tracing */ - stringValues: Record; + stringValues: Record } // https://github.com/paritytech/substrate/blob/ded44948/primitives/rpc/src/tracing.rs#L64 @@ -148,29 +148,29 @@ export interface Data { */ export interface Span { /** id for this span */ - id: number; + id: number /** id of the parent span, if any */ - parentId?: number; + parentId?: number /** Name of this span */ - name: string; + name: string /** Target, typically module */ - target: string; + target: string /** Indicates if the span is from wasm */ - wasm: boolean; + wasm: boolean } // https://github.com/paritytech/substrate/blob/28ac0a8/client/rpc-api/src/state/mod.rs#L35 export type StateRpc = { /** Call a contract at a block's state. */ - state_call(name: string, bytes: Hex, at?: Hash): RpcResult; - state_callAt: StateRpc["state_call"]; + state_call(name: string, bytes: Hex, at?: Hash): RpcResult + state_callAt: StateRpc["state_call"] /** * Returns the keys with prefix, leave empty to get all the keys. * @deprecated [2.0.0] Please use `getKeysPaged` with proper paging support */ - state_getKeys(prefix: StorageKey, at?: Hash): RpcResult; + state_getKeys(prefix: StorageKey, at?: Hash): RpcResult /** Returns the keys with prefix, leave empty to get all the keys */ - state_getPairs(prefix: StorageKey, at?: Hash): RpcResult<[StorageKey, StorageData][]>; + state_getPairs(prefix: StorageKey, at?: Hash): RpcResult<[StorageKey, StorageData][]> /** * Returns the keys with prefix with pagination support. * Up to `count` keys will be returned. @@ -181,22 +181,22 @@ export type StateRpc = { count: number, startKey?: StorageKey, at?: Hash, - ): RpcResult; - state_getKeysPagedAt: StateRpc["state_getKeysPaged"]; + ): RpcResult + state_getKeysPagedAt: StateRpc["state_getKeysPaged"] /** Returns a storage entry at a specific block's state. */ - state_getStorage(key: StorageKey, at?: Hash): RpcResult; - state_getStorageAt: StateRpc["state_getStorage"]; + state_getStorage(key: StorageKey, at?: Hash): RpcResult + state_getStorageAt: StateRpc["state_getStorage"] /** Returns the hash of a storage entry at a block's state. */ - state_getStorageHash(key: StorageKey, at?: Hash): RpcResult; - state_getStorageHashAt: StateRpc["state_getStorageHash"]; + state_getStorageHash(key: StorageKey, at?: Hash): RpcResult + state_getStorageHashAt: StateRpc["state_getStorageHash"] /** Returns the size of a storage entry at a block's state. */ - state_getStorageSize(key: StorageKey, at?: Hash): RpcResult; - state_getStorageSizeAt: StateRpc["state_getStorageSize"]; + state_getStorageSize(key: StorageKey, at?: Hash): RpcResult + state_getStorageSizeAt: StateRpc["state_getStorageSize"] /** Returns the runtime metadata as an opaque blob. 
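`state_getKeysPaged` is the paging-aware replacement for the deprecated `state_getKeys`; walking a large prefix means resuming each request from the last key of the previous page. A rough sketch, again with a hypothetical `rpcCall` helper and simplified aliases:

```ts
// Illustrative sketch: simplified aliases plus a hypothetical transport helper.
type Hash = string
type StorageKey = string
declare function rpcCall<R>(method: string, params: unknown[]): Promise<R>

// Walk every key under a prefix, `pageSize` at a time, resuming from the last key of each page.
async function allKeysUnder(prefix: StorageKey, at?: Hash, pageSize = 1000): Promise<StorageKey[]> {
  const keys: StorageKey[] = []
  let startKey: StorageKey | null = null
  while (true) {
    const page = await rpcCall<StorageKey[]>("state_getKeysPaged", [prefix, pageSize, startKey, at])
    keys.push(...page)
    if (page.length < pageSize) return keys
    startKey = page[page.length - 1] ?? null
  }
}
```

Stopping as soon as a page comes back short saves one trailing empty request.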
*/ - state_getMetadata(at?: Hash): RpcResult; + state_getMetadata(at?: Hash): RpcResult /** Get the runtime version. */ - state_getRuntimeVersion(at?: Hash): RpcResult; - chain_getRuntimeVersion: StateRpc["state_getRuntimeVersion"]; + state_getRuntimeVersion(at?: Hash): RpcResult + chain_getRuntimeVersion: StateRpc["state_getRuntimeVersion"] /** * Query historical storage entries (by key) starting from a block given as the second * parameter. @@ -204,32 +204,32 @@ export type StateRpc = { * NOTE This first returned result contains the initial state of storage for all keys. * Subsequent values in the vector represent changes to the previous state (diffs). */ - state_queryStorage(keys: StorageKey[], block: Hash, at?: Hash): RpcResult; + state_queryStorage(keys: StorageKey[], block: Hash, at?: Hash): RpcResult /** Query storage entries (by key) starting at block hash given as the second parameter. */ - state_queryStorageAt(keys: StorageKey[], at?: Hash): RpcResult; + state_queryStorageAt(keys: StorageKey[], at?: Hash): RpcResult /** Returns proof of storage entries at a specific block's state. */ - state_getReadProof(keys: StorageKey[], at?: Hash): RpcResult; + state_getReadProof(keys: StorageKey[], at?: Hash): RpcResult /** New runtime version subscription */ state_subscribeRuntimeVersion(): RpcResult< Subscription<"state_subscribeRuntimeVersion", RuntimeVersion> - >; + > state_unsubscribeRuntimeVersion( subscription: Subscription<"state_subscribeRuntimeVersion", RuntimeVersion>, - ): RpcResult; - chain_subscribeRuntimeVersion: StateRpc["state_subscribeRuntimeVersion"]; - chain_unsubscribeRuntimeVersion: StateRpc["state_unsubscribeRuntimeVersion"]; + ): RpcResult + chain_subscribeRuntimeVersion: StateRpc["state_subscribeRuntimeVersion"] + chain_unsubscribeRuntimeVersion: StateRpc["state_unsubscribeRuntimeVersion"] /** New storage subscription */ state_subscribeStorage( keys: StorageKey[] | null, - ): RpcResult>; + ): RpcResult> state_unsubscribeStorage( subscription: Subscription<"state_subscribeStorage", StorageChangeSet>, - ): RpcResult; + ): RpcResult /** See https://paritytech.github.io/substrate/master/sc_rpc_api/state/trait.StateApiServer.html#tymethod.trace_block */ state_traceBlock( block: Hash, targets?: string, storageKeys?: string, methods?: string, - ): RpcResult; -}; + ): RpcResult +} diff --git a/rpc/known/statemigration.ts b/rpc/known/statemigration.ts index 280e9d946..de8c5a18c 100644 --- a/rpc/known/statemigration.ts +++ b/rpc/known/statemigration.ts @@ -1,9 +1,9 @@ -import { Hash, RpcResult } from "./utils.ts"; +import { Hash, RpcResult } from "./utils.ts" // https://github.com/paritytech/substrate/blob/00cc5f1/utils/frame/rpc/state-trie-migration-rpc/src/lib.rs#L106 export interface MigrationStatusResult { - topRemainingToMigrate: number; - childRemainingToMigrate: number; + topRemainingToMigrate: number + childRemainingToMigrate: number } // https://github.com/paritytech/substrate/blob/00cc5f1/utils/frame/rpc/state-trie-migration-rpc/src/lib.rs#L113 @@ -15,5 +15,5 @@ export type StateMigrationRpc = { * won't change any state. Nonetheless it is a VERY costly call that should be * only exposed to trusted peers. 
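Each `state_subscribeStorage` notification delivers a `StorageChangeSet` naming the block and the keys whose values changed. A small sketch of folding one change set into a local cache, using simplified mirrors of the types above:

```ts
// Illustrative sketch: simplified mirrors of the shapes above.
type Hash = string
type StorageKey = string
type StorageData = string
interface StorageChangeSetLike {
  block: Hash
  changes: [StorageKey, StorageData | null][]
}

// Fold one `state_subscribeStorage` notification into a local cache; a null value
// means the key currently has no value at that block.
function applyChangeSet(cache: Map<StorageKey, StorageData>, changeSet: StorageChangeSetLike): Hash {
  for (const [key, value] of changeSet.changes) {
    if (value === null) cache.delete(key)
    else cache.set(key, value)
  }
  return changeSet.block
}
```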
*/ - state_trieMigrationStatus(at?: Hash): RpcResult; -}; + state_trieMigrationStatus(at?: Hash): RpcResult +} diff --git a/rpc/known/system.ts b/rpc/known/system.ts index 18b3661e3..ea2ee2451 100644 --- a/rpc/known/system.ts +++ b/rpc/known/system.ts @@ -1,76 +1,76 @@ -import { Hash, RpcResult, SerdeEnum } from "./utils.ts"; +import { Hash, RpcResult, SerdeEnum } from "./utils.ts" // https://github.com/paritytech/substrate/blob/57e3486/client/chain-spec/src/lib.rs#L198 export type ChainType = SerdeEnum<{ /** A development chain that runs mainly on one node. */ - Development: void; + Development: void /** A local chain that runs locally on multiple nodes for testing purposes. */ - Local: void; + Local: void /** A live chain. */ - Live: void; + Live: void /** Some custom chain type. */ - Custom: string; -}>; + Custom: string +}> // https://github.com/paritytech/substrate/blob/c172d0f/client/rpc-api/src/system/helpers.rs#L43 /** Health struct returned by the RPC */ export interface Health { /** Number of connected peers */ - peers: number; + peers: number /** Is the node syncing */ - isSyncing: boolean; + isSyncing: boolean /** * Should this node have any peers * * Might be false for local chains or when running without discovery. */ - shouldHavePeers: boolean; + shouldHavePeers: boolean } // https://github.com/paritytech/substrate/blob/c172d0f/client/rpc-api/src/system/helpers.rs#L63 /** Network Peer information */ export interface PeerInfo { /** Peer ID */ - peerId: string; + peerId: string /** Roles */ - roles: string; + roles: string /** Peer best block hash */ - best_hash: Hash; + best_hash: Hash /** Peer best block number */ - best_number: number; + best_number: number } // https://github.com/paritytech/substrate/blob/c172d0f/client/rpc-api/src/system/helpers.rs#L76 /** The role the node is running as */ export type NodeRole = SerdeEnum<{ /** The node is a full node */ - Full: void; + Full: void /** The node is an authority */ - Authority: void; -}>; + Authority: void +}> // https://github.com/paritytech/substrate/blob/c172d0f/client/rpc-api/src/system/helpers.rs#L86 export interface SyncState { /** Height of the block at which syncing started. */ - startingBlock: number; + startingBlock: number /** Height of the current best block of the node. */ - currentBlock: number; + currentBlock: number /** Height of the highest block in the network. */ - highestBlock: number; + highestBlock: number } // https://github.com/paritytech/substrate/blob/e0ccd00/client/rpc-api/src/system/mod.rs#L33 export type SystemRpc = { /** Get the node's implementation name. Plain old string. */ - system_name(): RpcResult; + system_name(): RpcResult /** Get the node implementation's version. Should be a semver string. */ - system_version(): RpcResult; + system_version(): RpcResult /** Get the chain's name. Given as a string identifier. */ - system_chain(): RpcResult; + system_chain(): RpcResult /** Get the chain's type. */ - system_chainType(): RpcResult; + system_chainType(): RpcResult /** Get a custom set of properties as a JSON object, defined in the chain spec. */ - system_properties(): RpcResult>; + system_properties(): RpcResult> /** * Return health status of the node. * @@ -78,18 +78,18 @@ export type SystemRpc = { * - connected to some peers (unless running in dev mode) * - not performing a major sync */ - system_health(): RpcResult; + system_health(): RpcResult /** Returns the base58-encoded PeerId of the node. 
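`system_health` is the natural readiness probe here; the check below just restates its doc comment. The `rpcCall` helper and the `HealthLike` mirror are illustrative assumptions:

```ts
// Illustrative sketch: a simplified mirror of `Health` plus a hypothetical transport helper.
interface HealthLike {
  peers: number
  isSyncing: boolean
  shouldHavePeers: boolean
}
declare function rpcCall<R>(method: string, params: unknown[]): Promise<R>

// Mirrors the doc comment above: healthy = has peers (when it should) and not in a major sync.
async function nodeIsReady(): Promise<boolean> {
  const health = await rpcCall<HealthLike>("system_health", [])
  const hasPeers = !health.shouldHavePeers || health.peers > 0
  return hasPeers && !health.isSyncing
}
```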
*/ - system_localPeerId(): RpcResult; + system_localPeerId(): RpcResult /** * Returns the multi-addresses that the local node is listening on * * The addresses include a trailing `/p2p/` with the local PeerId, and are thus suitable to * be passed to `addReservedPeer` or as a bootnode address for example. */ - system_localListenAddresses(): RpcResult; + system_localListenAddresses(): RpcResult /** Returns currently connected peers */ - system_peers(): RpcResult; + system_peers(): RpcResult /** * Returns current state of the network. * @@ -98,7 +98,7 @@ export type SystemRpc = { */ // TODO: the future of this call is uncertain: https://github.com/paritytech/substrate/issues/1890 // https://github.com/paritytech/substrate/issues/5541 - system_networkState(): RpcResult; + system_networkState(): RpcResult /** * Adds a reserved peer. Returns the empty string or an error. The string * parameter should encode a `p2p` multiaddr. @@ -106,21 +106,21 @@ export type SystemRpc = { * `/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV` * is an example of a valid, passing multiaddr with PeerId attached. */ - system_addReservedPeer(peer: string): RpcResult; + system_addReservedPeer(peer: string): RpcResult /** * Remove a reserved peer. Returns the empty string or an error. The string * should encode only the PeerId e.g. `QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV`. */ - system_removeReservedPeer(peerId: string): RpcResult; + system_removeReservedPeer(peerId: string): RpcResult /** Returns the list of reserved peers */ - system_reservedPeers(): RpcResult; + system_reservedPeers(): RpcResult /** Returns the roles the node is running as. */ - system_nodeRoles(): RpcResult; + system_nodeRoles(): RpcResult /** * Returns the state of the syncing of the node: starting block, current best block, highest * known block. */ - system_syncState(): RpcResult; + system_syncState(): RpcResult /** * Adds the supplied directives to the current log filter * @@ -128,7 +128,7 @@ export type SystemRpc = { * * `sync=debug,state=trace` */ - system_addLogFilter(directives: string): RpcResult; + system_addLogFilter(directives: string): RpcResult /** Resets the log filter to Substrate defaults */ - system_resetLogFilter(): RpcResult; -}; + system_resetLogFilter(): RpcResult +} diff --git a/rpc/known/utils.ts b/rpc/known/utils.ts index 19094b882..789737fc9 100644 --- a/rpc/known/utils.ts +++ b/rpc/known/utils.ts @@ -1,16 +1,16 @@ -import { Expand } from "../../deps/scale.ts"; -import * as U from "../../util/mod.ts"; +import { Expand } from "../../deps/scale.ts" +import * as U from "../../util/mod.ts" -export type SerdeResult = SerdeEnum<{ Ok: O; Err: E }>; +export type SerdeResult = SerdeEnum<{ Ok: O; Err: E }> export type SerdeEnum = { - [K in keyof T]: T[K] extends void ? K : Expand & Omit<{ [K in keyof T]?: never }, K>>; -}[keyof T]; + [K in keyof T]: T[K] extends void ? 
K : Expand & Omit<{ [K in keyof T]?: never }, K>> +}[keyof T] -export type Hex = U.Hex; -export type Hash = U.HexHash; -export type SubId = string; -export type AccountId = string; -export type Subscription = string & { _subscription: [T, U] }; -export type NumberOrHex = U.HexEncoded | number; -export type ListOrValue = T | T[]; -export type RpcResult = T; +export type Hex = U.Hex +export type Hash = U.HexHash +export type SubId = string +export type AccountId = string +export type Subscription = string & { _subscription: [T, U] } +export type NumberOrHex = U.HexEncoded | number +export type ListOrValue = T | T[] +export type RpcResult = T diff --git a/rpc/messages.ts b/rpc/messages.ts index 0b062c7d2..eb866ba5e 100644 --- a/rpc/messages.ts +++ b/rpc/messages.ts @@ -1,49 +1,49 @@ export interface EgressMessage extends JsonRpcVersionBearer { - method: Method; - id: string; - params: Params; + method: Method + id: string + params: Params } -export type IngressMessage = OkMessage | ErrorMessage | NotificationMessage; +export type IngressMessage = OkMessage | ErrorMessage | NotificationMessage export interface OkMessage extends JsonRpcVersionBearer { - id: string; - result: Result; - params?: never; - error?: never; + id: string + result: Result + params?: never + error?: never } export interface ErrorMessage extends JsonRpcVersionBearer { - id: string; + id: string error: { - code: number; - message: string; - data: Data; - }; - params?: never; - result?: never; + code: number + message: string + data: Data + } + params?: never + result?: never } export interface NotificationMessage extends JsonRpcVersionBearer { - method: Method; - id?: never; + method: Method + id?: never params: { - subscription: string; - result: Result; - }; - result?: never; - error?: never; + subscription: string + result: Result + } + result?: never + error?: never } interface JsonRpcVersionBearer { - jsonrpc: "2.0"; + jsonrpc: "2.0" } export function parse(raw: string) { // TODO - return JSON.parse(raw); + return JSON.parse(raw) } diff --git a/rpc/mod.ts b/rpc/mod.ts index 1c4ffdbcb..f28315eab 100644 --- a/rpc/mod.ts +++ b/rpc/mod.ts @@ -1,7 +1,7 @@ -export * from "./client.ts"; -export * as known from "./known/mod.ts"; -export * as msg from "./messages.ts"; -export * from "./provider/base.ts"; -export * from "./provider/errors.ts"; -export * from "./provider/proxy.ts"; -export * from "./provider/smoldot.ts"; +export * from "./client.ts" +export * as known from "./known/mod.ts" +export * as msg from "./messages.ts" +export * from "./provider/base.ts" +export * from "./provider/errors.ts" +export * from "./provider/proxy.ts" +export * from "./provider/smoldot.ts" diff --git a/rpc/provider/base.ts b/rpc/provider/base.ts index 45cb29daf..5cb1107ea 100644 --- a/rpc/provider/base.ts +++ b/rpc/provider/base.ts @@ -1,6 +1,6 @@ -import * as U from "../../util/mod.ts"; -import * as msg from "../messages.ts"; -import { ProviderCloseError, ProviderHandlerError, ProviderSendError } from "./errors.ts"; +import * as U from "../../util/mod.ts" +import * as msg from "../messages.ts" +import { ProviderCloseError, ProviderHandlerError, ProviderSendError } from "./errors.ts" /** * @param discoveryValue the value with which to discover the given chain @@ -14,18 +14,18 @@ export type Provider< > = ( discoveryValue: DiscoveryValue, listener: ProviderListener, -) => ProviderRef; +) => ProviderRef export type ProviderListener = U.Listener< | msg.IngressMessage | ProviderSendError | ProviderHandlerError ->; +> export interface ProviderRef { 
- nextId(): string; - send(message: msg.EgressMessage): void; - release(): Promise>; + nextId(): string + send(message: msg.EgressMessage): void + release(): Promise> } export class ProviderConnection { @@ -33,7 +33,7 @@ export class ProviderConnection { listeners = new Map< ProviderListener, ProviderListener - >(); + >() /** * @param inner the underlying representation of the connection (such as a WebSocket or smoldot chain) @@ -43,17 +43,17 @@ export class ProviderConnection { addListener = (listener: ProviderListener) => { if (this.listeners.has(listener)) { - return; + return } this.listeners.set( listener, listener.bind({ stop: () => { - this.listeners.delete(listener); + this.listeners.delete(listener) }, }), - ); - }; + ) + } /** * Execute each listener in sequence @@ -61,14 +61,14 @@ export class ProviderConnection { */ forEachListener: ProviderListener = (message) => { for (const listener of this.listeners.values()) { - listener(message); + listener(message) } - }; + } } export function nextIdFactory() { - let i = 0; + let i = 0 return () => { - return (i++).toString(); - }; + return (i++).toString() + } } diff --git a/rpc/provider/errors.ts b/rpc/provider/errors.ts index 0122d4811..ef0f6e6cf 100644 --- a/rpc/provider/errors.ts +++ b/rpc/provider/errors.ts @@ -1,25 +1,25 @@ -import { msg } from "../mod.ts"; +import { msg } from "../mod.ts" export class ProviderSendError extends Error { - override readonly name = "ProviderSendError"; + override readonly name = "ProviderSendError" constructor( override readonly cause: Data, readonly egressMessage: msg.EgressMessage, ) { - super(); + super() } } export class ProviderHandlerError extends Error { - override readonly name = "ProviderInternalError"; + override readonly name = "ProviderInternalError" constructor(override readonly cause: Data) { - super(); + super() } } export class ProviderCloseError extends Error { - override readonly name = "ProviderCloseError"; + override readonly name = "ProviderCloseError" constructor(override readonly cause: Data) { - super(); + super() } } diff --git a/rpc/provider/proxy.test.ts b/rpc/provider/proxy.test.ts index d5cf2fc04..7bcb9e0ac 100644 --- a/rpc/provider/proxy.test.ts +++ b/rpc/provider/proxy.test.ts @@ -1,25 +1,25 @@ -import { deferred } from "../../deps/std/async.ts"; -import { assertExists, assertNotInstanceOf } from "../../deps/std/testing/asserts.ts"; -import * as T from "../../test_util/mod.ts"; -import { proxyProvider } from "./proxy.ts"; +import { deferred } from "../../deps/std/async.ts" +import { assertExists, assertNotInstanceOf } from "../../deps/std/testing/asserts.ts" +import * as T from "../../test_util/mod.ts" +import { proxyProvider } from "./proxy.ts" Deno.test({ name: "Proxy Provider", async fn() { - const stopped = deferred(); + const stopped = deferred() const provider = proxyProvider(await T.polkadot.url, (message) => { - assertNotInstanceOf(message, Error); - assertExists(message.result); - stopped.resolve(); - }); + assertNotInstanceOf(message, Error) + assertExists(message.result) + stopped.resolve() + }) provider.send({ jsonrpc: "2.0", id: provider.nextId(), method: "system_health", params: [], - }); - await stopped; - const providerRelease = await provider.release(); - assertNotInstanceOf(providerRelease, Error); + }) + await stopped + const providerRelease = await provider.release() + assertNotInstanceOf(providerRelease, Error) }, -}); +}) diff --git a/rpc/provider/proxy.ts b/rpc/provider/proxy.ts index 683dcf451..1ab861267 100644 --- a/rpc/provider/proxy.ts +++ 
b/rpc/provider/proxy.ts @@ -1,95 +1,95 @@ -import * as U from "../../util/mod.ts"; -import * as msg from "../messages.ts"; -import { nextIdFactory, Provider, ProviderConnection, ProviderListener } from "./base.ts"; -import { ProviderCloseError, ProviderHandlerError, ProviderSendError } from "./errors.ts"; +import * as U from "../../util/mod.ts" +import * as msg from "../messages.ts" +import { nextIdFactory, Provider, ProviderConnection, ProviderListener } from "./base.ts" +import { ProviderCloseError, ProviderHandlerError, ProviderSendError } from "./errors.ts" /** Global lookup of existing connections */ -const connections = new Map(); -type ProxyProviderConnection = ProviderConnection; +const connections = new Map() +type ProxyProviderConnection = ProviderConnection -const nextId = nextIdFactory(); +const nextId = nextIdFactory() export const proxyProvider: Provider = (url, listener) => { return { nextId, send: (message) => { - const conn = connection(url, listener); - (async () => { - const openError = await ensureWsOpen(conn.inner); + const conn = connection(url, listener) + ;(async () => { + const openError = await ensureWsOpen(conn.inner) if (openError) { - conn.forEachListener(new ProviderSendError(openError, message)); - return; + conn.forEachListener(new ProviderSendError(openError, message)) + return } try { - conn.inner.send(JSON.stringify(message)); + conn.inner.send(JSON.stringify(message)) } catch (error) { - listener(new ProviderSendError(error as Event, message)); + listener(new ProviderSendError(error as Event, message)) } - })(); + })() }, release: () => { - const { cleanUp, listeners, inner } = connection(url, listener); - listeners.delete(listener); + const { cleanUp, listeners, inner } = connection(url, listener) + listeners.delete(listener) if (!listeners.size) { - connections.delete(url); - cleanUp(); - return closeWs(inner); + connections.delete(url) + cleanUp() + return closeWs(inner) } - return Promise.resolve(undefined); + return Promise.resolve(undefined) }, - }; -}; + } +} function connection( url: string, listener: ProviderListener, ): ProxyProviderConnection { const conn = U.getOrInit(connections, url, () => { - const controller = new AbortController(); - const ws = new WebSocket(url); + const controller = new AbortController() + const ws = new WebSocket(url) ws.addEventListener("message", (e) => { - conn!.forEachListener(msg.parse(e.data)); - }, controller); + conn!.forEachListener(msg.parse(e.data)) + }, controller) ws.addEventListener("error", (e) => { - conn!.forEachListener(new ProviderHandlerError(e)); - }, controller); - return new ProviderConnection(ws, () => controller.abort()); - }); - conn.addListener(listener); - return conn; + conn!.forEachListener(new ProviderHandlerError(e)) + }, controller) + return new ProviderConnection(ws, () => controller.abort()) + }) + conn.addListener(listener) + return conn } function ensureWsOpen(ws: WebSocket): Promise { if (ws.readyState === WebSocket.OPEN) { - return Promise.resolve(undefined); + return Promise.resolve(undefined) } return new Promise((resolve) => { - const controller = new AbortController(); + const controller = new AbortController() ws.addEventListener("open", () => { - controller.abort(); - resolve(undefined); - }, controller); + controller.abort() + resolve(undefined) + }, controller) ws.addEventListener("error", (e) => { - controller.abort(); - resolve(e); - }, controller); - }); + controller.abort() + resolve(e) + }, controller) + }) } function closeWs(socket: WebSocket): Promise> { if 
(socket.readyState === WebSocket.CLOSED) { - return Promise.resolve(undefined); + return Promise.resolve(undefined) } return new Promise>((resolve) => { - const controller = new AbortController(); + const controller = new AbortController() socket.addEventListener("close", () => { - controller.abort(); - resolve(undefined); - }, controller); + controller.abort() + resolve(undefined) + }, controller) socket.addEventListener("error", (e) => { - controller.abort(); - resolve(new ProviderCloseError(e)); - }, controller); - socket.close(); - }); + controller.abort() + resolve(new ProviderCloseError(e)) + }, controller) + socket.close() + }) } diff --git a/rpc/provider/smoldot.test.ts b/rpc/provider/smoldot.test.ts index feb7ad35f..f3b30535c 100644 --- a/rpc/provider/smoldot.test.ts +++ b/rpc/provider/smoldot.test.ts @@ -1,7 +1,7 @@ -import { deferred } from "../../deps/std/async.ts"; -import { assertExists, assertNotInstanceOf } from "../../deps/std/testing/asserts.ts"; -import { ProviderListener } from "./base.ts"; -import { smoldotProvider } from "./smoldot.ts"; +import { deferred } from "../../deps/std/async.ts" +import { assertExists, assertNotInstanceOf } from "../../deps/std/testing/asserts.ts" +import { ProviderListener } from "./base.ts" +import { smoldotProvider } from "./smoldot.ts" Deno.test({ name: "Smoldot Provider", @@ -16,59 +16,59 @@ Deno.test({ "https://raw.githubusercontent.com/paritytech/substrate-connect/main/packages/connect/src/connector/specs/polkadot.json", ) ) - .text(); - const pendingSubscriptionId = deferred(); - const initialized = deferred(); - const unsubscribed = deferred(); + .text() + const pendingSubscriptionId = deferred() + const initialized = deferred() + const unsubscribed = deferred() const checks: ProviderListener[] = [ // check for chainHead_unstable_follow subscription (message) => { - assertNotInstanceOf(message, Error); - assertExists(message.result); - pendingSubscriptionId.resolve(message.result); + assertNotInstanceOf(message, Error) + assertExists(message.result) + pendingSubscriptionId.resolve(message.result) }, // check for chainHead_unstable_follow initialized event (message) => { - assertNotInstanceOf(message, Error); - assertExists(message.params?.result); + assertNotInstanceOf(message, Error) + assertExists(message.params?.result) if (message.params?.result.event === "initialized") { - initialized.resolve(); + initialized.resolve() } }, // check for chainHead_unstable_unfollow unsubscribe (message) => { - assertNotInstanceOf(message, Error); + assertNotInstanceOf(message, Error) if (message?.result === null) { - unsubscribed.resolve(); + unsubscribed.resolve() } }, - ]; + ] const provider = smoldotProvider({ relayChainSpec }, (message) => { if (checks.length > 1) { - checks.shift()!(message); + checks.shift()!(message) } else { - checks[0]!(message); + checks[0]!(message) } - }); + }) provider.send({ jsonrpc: "2.0", id: provider.nextId(), method: "chainHead_unstable_follow", params: [false], - }); - const subscriptionId = await pendingSubscriptionId; - await initialized; + }) + const subscriptionId = await pendingSubscriptionId + await initialized provider.send({ jsonrpc: "2.0", id: provider.nextId(), method: "chainHead_unstable_unfollow", params: [subscriptionId], - }); - await unsubscribed; - const providerRelease = await provider.release(); - assertNotInstanceOf(providerRelease, Error); + }) + await unsubscribed + const providerRelease = await provider.release() + assertNotInstanceOf(providerRelease, Error) }, - }); + }) await t.step({ 
name: "parachain connection", async fn() { @@ -77,67 +77,67 @@ Deno.test({ "https://raw.githubusercontent.com/paritytech/substrate-connect/main/packages/connect/src/connector/specs/westend2.json", ) ) - .text(); + .text() const parachainSpec = await ( await fetch( "https://raw.githubusercontent.com/paritytech/substrate-connect/main/projects/demo/src/assets/westend-westmint.json", ) ) - .text(); - const pendingSubscriptionId = deferred(); - const initialized = deferred(); - const unsubscribed = deferred(); + .text() + const pendingSubscriptionId = deferred() + const initialized = deferred() + const unsubscribed = deferred() const checks: ProviderListener[] = [ // check for chainHead_unstable_follow subscription (message) => { - assertNotInstanceOf(message, Error); - assertExists(message.result); - pendingSubscriptionId.resolve(message.result); + assertNotInstanceOf(message, Error) + assertExists(message.result) + pendingSubscriptionId.resolve(message.result) }, // check for chainHead_unstable_follow initialized event (message) => { - assertNotInstanceOf(message, Error); - assertExists(message.params?.result); + assertNotInstanceOf(message, Error) + assertExists(message.params?.result) if (message.params?.result.event === "initialized") { - initialized.resolve(); + initialized.resolve() } }, // check for chainHead_unstable_unfollow unsubscribe (message) => { - assertNotInstanceOf(message, Error); + assertNotInstanceOf(message, Error) if (message?.result === null) { - unsubscribed.resolve(); + unsubscribed.resolve() } }, - ]; + ] const provider = smoldotProvider( { parachainSpec, relayChainSpec }, (message) => { if (checks.length > 1) { - checks.shift()!(message); + checks.shift()!(message) } else { - checks[0]!(message); + checks[0]!(message) } }, - ); + ) provider.send({ jsonrpc: "2.0", id: provider.nextId(), method: "chainHead_unstable_follow", params: [false], - }); - const subscriptionId = await pendingSubscriptionId; - await initialized; + }) + const subscriptionId = await pendingSubscriptionId + await initialized provider.send({ jsonrpc: "2.0", id: provider.nextId(), method: "chainHead_unstable_unfollow", params: [subscriptionId], - }); - await unsubscribed; - const providerRelease = await provider.release(); - assertNotInstanceOf(providerRelease, Error); + }) + await unsubscribed + const providerRelease = await provider.release() + assertNotInstanceOf(providerRelease, Error) }, - }); + }) }, -}); +}) diff --git a/rpc/provider/smoldot.ts b/rpc/provider/smoldot.ts index 5b7709982..943d7454b 100644 --- a/rpc/provider/smoldot.ts +++ b/rpc/provider/smoldot.ts @@ -6,39 +6,39 @@ import { MalformedJsonRpcError, QueueFullError, start, -} from "../../deps/smoldot.ts"; -import { Chain, Client, ClientOptions } from "../../deps/smoldot/client.d.ts"; -import { deferred } from "../../deps/std/async.ts"; -import * as msg from "../messages.ts"; -import { nextIdFactory, Provider, ProviderConnection, ProviderListener } from "./base.ts"; -import { ProviderCloseError, ProviderHandlerError, ProviderSendError } from "./errors.ts"; +} from "../../deps/smoldot.ts" +import { Chain, Client, ClientOptions } from "../../deps/smoldot/client.d.ts" +import { deferred } from "../../deps/std/async.ts" +import * as msg from "../messages.ts" +import { nextIdFactory, Provider, ProviderConnection, ProviderListener } from "./base.ts" +import { ProviderCloseError, ProviderHandlerError, ProviderSendError } from "./errors.ts" type SmoldotSendErrorData = | AlreadyDestroyedError | CrashError | JsonRpcDisabledError | 
MalformedJsonRpcError - | QueueFullError; + | QueueFullError type SmoldotHandlerErrorData = | AlreadyDestroyedError | CrashError | JsonRpcDisabledError - | AddChainError; -type SmoldotCloseErrorData = AlreadyDestroyedError | CrashError; + | AddChainError +type SmoldotCloseErrorData = AlreadyDestroyedError | CrashError -let client: undefined | Client; -const connections = new Map(); +let client: undefined | Client +const connections = new Map() class SmoldotProviderConnection extends ProviderConnection {} -const nextId = nextIdFactory(); +const nextId = nextIdFactory() export interface SmoldotProviderProps { - relayChainSpec: string; - parachainSpec?: string; + relayChainSpec: string + parachainSpec?: string // TODO: support deferring closing (how / what heuristic?) - deferClosing?: boolean; + deferClosing?: boolean } export const smoldotProvider: Provider< @@ -50,38 +50,38 @@ export const smoldotProvider: Provider< return { nextId, send: (message) => { - (async () => { - let conn: SmoldotProviderConnection; + ;(async () => { + let conn: SmoldotProviderConnection try { - conn = await connection(props, listener); + conn = await connection(props, listener) } catch (error) { - listener(new ProviderHandlerError(error as SmoldotHandlerErrorData)); - return; + listener(new ProviderHandlerError(error as SmoldotHandlerErrorData)) + return } try { - conn.inner.sendJsonRpc(JSON.stringify(message)); + conn.inner.sendJsonRpc(JSON.stringify(message)) } catch (error) { - listener(new ProviderSendError(error as SmoldotSendErrorData, message)); + listener(new ProviderSendError(error as SmoldotSendErrorData, message)) } - })(); + })() }, release: async () => { - const { cleanUp, listeners, inner } = await connection(props, listener); - listeners.delete(listener); + const { cleanUp, listeners, inner } = await connection(props, listener) + listeners.delete(listener) if (!listeners.size) { - connections.delete(props); - cleanUp(); + connections.delete(props) + cleanUp() try { // TODO: utilize `deferClosing` prop once we flesh out approach - inner.remove(); + inner.remove() } catch (e) { - return new ProviderCloseError(e as SmoldotCloseErrorData); + return new ProviderCloseError(e as SmoldotCloseErrorData) } } - return; + return }, - }; -}; + } +} async function connection( props: SmoldotProviderProps, @@ -94,44 +94,44 @@ async function connection( forbidNonLocalWs: true, cpuRateLimit: .25, } as ClientOptions, - ); + ) } - let conn = connections.get(props); + let conn = connections.get(props) if (!conn) { - let inner: Chain; + let inner: Chain if (props.parachainSpec) { const relayChainConnection = await client.addChain({ chainSpec: props.relayChainSpec, - }); + }) inner = await client.addChain({ chainSpec: props.parachainSpec, potentialRelayChains: [relayChainConnection], - }); + }) } else { - inner = await client.addChain({ chainSpec: props.relayChainSpec }); + inner = await client.addChain({ chainSpec: props.relayChainSpec }) } - const stopListening = deferred(); - conn = new SmoldotProviderConnection(inner, () => stopListening.resolve()); - connections.set(props, conn); - (async () => { + const stopListening = deferred() + conn = new SmoldotProviderConnection(inner, () => stopListening.resolve()) + connections.set(props, conn) + ;(async () => { while (true) { try { const response = await Promise.race([ stopListening, inner.nextJsonRpcResponse(), - ]); + ]) if (!response) { - break; + break } - const message = msg.parse(response); - conn!.forEachListener(message); + const message = msg.parse(response) + 
conn!.forEachListener(message) } catch (e) { - conn!.forEachListener(new ProviderHandlerError(e as SmoldotHandlerErrorData)); - break; + conn!.forEachListener(new ProviderHandlerError(e as SmoldotHandlerErrorData)) + break } } - })(); + })() } - conn.addListener(listener); - return conn; + conn.addListener(listener) + return conn } diff --git a/ss58/mod.test.ts b/ss58/mod.test.ts index 21aef5ccd..f52ab0086 100644 --- a/ss58/mod.test.ts +++ b/ss58/mod.test.ts @@ -1,6 +1,6 @@ -import { assertEquals, assertInstanceOf } from "../deps/std/testing/asserts.ts"; -import * as T from "../test_util/mod.ts"; -import * as ss58 from "./mod.ts"; +import { assertEquals, assertInstanceOf } from "../deps/std/testing/asserts.ts" +import * as T from "../test_util/mod.ts" +import * as ss58 from "./mod.ts" for ( const [networkName, address, [prefix, publicKey]] of [ @@ -22,37 +22,37 @@ for ( ] as const ) { Deno.test(`ss58.encode ${networkName}`, () => { - const actual = ss58.encode(prefix, publicKey); - assertEquals(actual, address); - }); + const actual = ss58.encode(prefix, publicKey) + assertEquals(actual, address) + }) Deno.test(`ss58.decode ${networkName}`, () => { - const actual = ss58.decode(address); - assertEquals(actual, [prefix, publicKey]); - }); + const actual = ss58.decode(address) + assertEquals(actual, [prefix, publicKey]) + }) } Deno.test("ss58.encode invalid public key length", () => { assertInstanceOf( ss58.encode(0, T.alice.publicKey.slice(0, 30)), ss58.InvalidPublicKeyLengthError, - ); -}); + ) +}) Deno.test("ss58.encode invalid network prefix", () => { - assertInstanceOf(ss58.encode(46, T.alice.publicKey, [0]), ss58.InvalidNetworkPrefixError); -}); + assertInstanceOf(ss58.encode(46, T.alice.publicKey, [0]), ss58.InvalidNetworkPrefixError) +}) Deno.test("ss58.decodeRaw long address", () => { - assertInstanceOf(ss58.decodeRaw(new Uint8Array(40)), ss58.InvalidAddressLengthError); -}); + assertInstanceOf(ss58.decodeRaw(new Uint8Array(40)), ss58.InvalidAddressLengthError) +}) Deno.test("ss58.decodeRaw short address", () => { - assertInstanceOf(ss58.decodeRaw(new Uint8Array(30)), ss58.InvalidAddressLengthError); -}); + assertInstanceOf(ss58.decodeRaw(new Uint8Array(30)), ss58.InvalidAddressLengthError) +}) Deno.test("ss58.decodeRaw invalid checksum", () => { assertInstanceOf( ss58.decodeRaw(Uint8Array.of(0, ...T.alice.publicKey, 255, 255)), ss58.InvalidAddressChecksumError, - ); -}); + ) +}) diff --git a/ss58/mod.ts b/ss58/mod.ts index fd1526013..d3d7adb4e 100644 --- a/ss58/mod.ts +++ b/ss58/mod.ts @@ -1,44 +1,44 @@ -import * as base58 from "../deps/std/encoding/base58.ts"; -import { Blake2b } from "../hashers/blake2b.ts"; +import * as base58 from "../deps/std/encoding/base58.ts" +import { Blake2b } from "../hashers/blake2b.ts" // SS58PRE string (0x53533538505245 hex) encoded as Uint8Array -const SS58PRE = Uint8Array.of(83, 83, 53, 56, 80, 82, 69); -const CHECKSUM_LENGTH = 2; +const SS58PRE = Uint8Array.of(83, 83, 53, 56, 80, 82, 69) +const CHECKSUM_LENGTH = 2 const VALID_ADDRESS_LENGTHS: Record = { 35: true, 36: true, 37: true, 38: true, -}; +} const VALID_PUBLIC_KEY_LENGTHS: Record = { 32: true, 33: true, -}; +} export const encode = ( prefix: number, pubKey: Uint8Array, validNetworkPrefixes?: readonly number[], ) => { - const encodeRawResult = encodeRaw(prefix, pubKey, validNetworkPrefixes); - if (encodeRawResult instanceof Error) return encodeRawResult; - return base58.encode(encodeRawResult); -}; + const encodeRawResult = encodeRaw(prefix, pubKey, validNetworkPrefixes) + if (encodeRawResult 
instanceof Error) return encodeRawResult + return base58.encode(encodeRawResult) +} export const encodeRaw = ( prefix: number, pubKey: Uint8Array, validNetworkPrefixes?: readonly number[], ): Uint8Array | InvalidPublicKeyLengthError | InvalidNetworkPrefixError => { - const isValidPublicKeyLength = !!VALID_PUBLIC_KEY_LENGTHS[pubKey.length]; + const isValidPublicKeyLength = !!VALID_PUBLIC_KEY_LENGTHS[pubKey.length] if (!isValidPublicKeyLength) { - return new InvalidPublicKeyLengthError(); + return new InvalidPublicKeyLengthError() } - const isValidNetworkPrefix = !validNetworkPrefixes || validNetworkPrefixes.includes(prefix); + const isValidNetworkPrefix = !validNetworkPrefixes || validNetworkPrefixes.includes(prefix) if (!isValidNetworkPrefix) { - return new InvalidNetworkPrefixError(); + return new InvalidNetworkPrefixError() } const prefixBytes = prefix < 64 @@ -46,34 +46,34 @@ export const encodeRaw = ( : Uint8Array.of( ((prefix & 0b0000_0000_1111_1100) >> 2) | 0b0100_0000, (prefix >> 8) | ((prefix & 0b0000_0000_0000_0011) << 6), - ); + ) - const hasher = new Blake2b(); + const hasher = new Blake2b() - hasher.update(SS58PRE); - hasher.update(prefixBytes); - hasher.update(pubKey); + hasher.update(SS58PRE) + hasher.update(prefixBytes) + hasher.update(pubKey) - const digest = hasher.digest(); - const checksum = digest.subarray(0, CHECKSUM_LENGTH); - hasher.dispose(); + const digest = hasher.digest() + const checksum = digest.subarray(0, CHECKSUM_LENGTH) + hasher.dispose() - const address = new Uint8Array(prefixBytes.length + pubKey.length + CHECKSUM_LENGTH); + const address = new Uint8Array(prefixBytes.length + pubKey.length + CHECKSUM_LENGTH) - address.set(prefixBytes, 0); - address.set(pubKey, prefixBytes.length); - address.set(checksum, prefixBytes.length + pubKey.length); + address.set(prefixBytes, 0) + address.set(pubKey, prefixBytes.length) + address.set(checksum, prefixBytes.length + pubKey.length) - return address; -}; + return address +} export class InvalidPublicKeyLengthError extends Error { - override readonly name = "InvalidPublicKeyLengthError"; + override readonly name = "InvalidPublicKeyLengthError" } export class InvalidNetworkPrefixError extends Error { - override readonly name = "InvalidNetworkPrefixError"; + override readonly name = "InvalidNetworkPrefixError" } -export const decode = (address: string) => decodeRaw(base58.decode(address)); +export const decode = (address: string) => decodeRaw(base58.decode(address)) export const decodeRaw = ( address: Uint8Array, ): @@ -81,42 +81,42 @@ export const decodeRaw = ( | InvalidAddressLengthError | InvalidAddressChecksumError => { - const isValidAddressLength = !!VALID_ADDRESS_LENGTHS[address.length]; + const isValidAddressLength = !!VALID_ADDRESS_LENGTHS[address.length] if (!isValidAddressLength) { - return new InvalidAddressLengthError(); + return new InvalidAddressLengthError() } - const prefixLength = address[0]! & 0b0100_0000 ? 2 : 1; + const prefixLength = address[0]! & 0b0100_0000 ? 2 : 1 const prefix: number = prefixLength === 1 ? address[0]! : ((address[0]! & 0b0011_1111) << 2) | (address[1]! >> 6) - | ((address[1]! & 0b0011_1111) << 8); + | ((address[1]! 
& 0b0011_1111) << 8) - const hasher = new Blake2b(); + const hasher = new Blake2b() - hasher.update(SS58PRE); - hasher.update(address.subarray(0, address.length - CHECKSUM_LENGTH)); + hasher.update(SS58PRE) + hasher.update(address.subarray(0, address.length - CHECKSUM_LENGTH)) - const digest = hasher.digest(); - const checksum = address.subarray(address.length - CHECKSUM_LENGTH); - hasher.dispose(); + const digest = hasher.digest() + const checksum = address.subarray(address.length - CHECKSUM_LENGTH) + hasher.dispose() if (digest[0] !== checksum[0] || digest[1] !== checksum[1]) { - return new InvalidAddressChecksumError(); + return new InvalidAddressChecksumError() } const pubKey = address.subarray( prefixLength, address.length - CHECKSUM_LENGTH, - ); + ) - return [prefix, pubKey]; -}; + return [prefix, pubKey] +} export class InvalidAddressLengthError extends Error { - override readonly name = "InvalidAddressError"; + override readonly name = "InvalidAddressError" } export class InvalidAddressChecksumError extends Error { - override readonly name = "InvalidAddressChecksumError"; + override readonly name = "InvalidAddressChecksumError" } diff --git a/test_util/clients/kusama.ts b/test_util/clients/kusama.ts index 7f18f4159..bfa0ac15d 100644 --- a/test_util/clients/kusama.ts +++ b/test_util/clients/kusama.ts @@ -1,3 +1,3 @@ -import { LocalClientEffect } from "../local.ts"; +import { LocalClientEffect } from "../local.ts" -export const client = new LocalClientEffect("kusama"); +export const client = new LocalClientEffect("kusama") diff --git a/test_util/clients/mod.ts b/test_util/clients/mod.ts index 283f702af..060bb7d7e 100644 --- a/test_util/clients/mod.ts +++ b/test_util/clients/mod.ts @@ -1,4 +1,4 @@ -export { client as kusama } from "./kusama.ts"; -export { client as polkadot } from "./polkadot.ts"; -export { client as rococo } from "./rococo.ts"; -export { client as westend } from "./westend.ts"; +export { client as kusama } from "./kusama.ts" +export { client as polkadot } from "./polkadot.ts" +export { client as rococo } from "./rococo.ts" +export { client as westend } from "./westend.ts" diff --git a/test_util/clients/polkadot.ts b/test_util/clients/polkadot.ts index 752452142..995234b67 100644 --- a/test_util/clients/polkadot.ts +++ b/test_util/clients/polkadot.ts @@ -1,3 +1,3 @@ -import { LocalClientEffect } from "../local.ts"; +import { LocalClientEffect } from "../local.ts" -export const client = new LocalClientEffect("polkadot"); +export const client = new LocalClientEffect("polkadot") diff --git a/test_util/clients/rococo.ts b/test_util/clients/rococo.ts index 830438236..39d4d6732 100644 --- a/test_util/clients/rococo.ts +++ b/test_util/clients/rococo.ts @@ -1,3 +1,3 @@ -import { LocalClientEffect } from "../local.ts"; +import { LocalClientEffect } from "../local.ts" -export const client = new LocalClientEffect("rococo"); +export const client = new LocalClientEffect("rococo") diff --git a/test_util/clients/westend.ts b/test_util/clients/westend.ts index 4cb3ebb1f..6348797cd 100644 --- a/test_util/clients/westend.ts +++ b/test_util/clients/westend.ts @@ -1,3 +1,3 @@ -import { LocalClientEffect } from "../local.ts"; +import { LocalClientEffect } from "../local.ts" -export const client = new LocalClientEffect("westend"); +export const client = new LocalClientEffect("westend") diff --git a/test_util/common.ts b/test_util/common.ts index f24ca865b..3141af67b 100644 --- a/test_util/common.ts +++ b/test_util/common.ts @@ -1,53 +1,53 @@ -export type RUNTIME_CODES = typeof 
RUNTIME_CODES; +export type RUNTIME_CODES = typeof RUNTIME_CODES export const RUNTIME_CODES = { polkadot: 0, kusama: 1, westend: 2, rococo: 3, -} as const; +} as const -export type RuntimeName = keyof RUNTIME_CODES; +export type RuntimeName = keyof RUNTIME_CODES export const RUNTIME_NAMES: { [N in RuntimeName as RUNTIME_CODES[N]]: N } = { 0: "polkadot", 1: "kusama", 2: "westend", 3: "rococo", -}; +} export function isRuntimeName(inQuestion: string): inQuestion is RuntimeName { return inQuestion === "polkadot" || inQuestion === "kusama" || inQuestion === "polkadot" - || inQuestion === "polkadot"; + || inQuestion === "polkadot" } export class InvalidRuntimeSpecifiedError extends Error { - override readonly name = "InvalidRuntimeSpecifiedError"; + override readonly name = "InvalidRuntimeSpecifiedError" constructor(readonly specified: string) { super( `There is no test runtime with the name "${specified}". Please specify one of the following values: ${ Object.values(RUNTIME_NAMES).join(", ") }`, - ); + ) } } export class PolkadotBinNotFoundError extends Error { - override readonly name = "PolkadotBinNotFoundError"; + override readonly name = "PolkadotBinNotFoundError" constructor() { super( "The Polkadot CLI was not found. Please ensure Polkadot is installed and PATH is set for `polkadot`." + `For more information, visit the following link: "https://github.com/paritytech/polkadot".`, - ); + ) } } export function polkadotProcess(port: number, runtimeName: RuntimeName) { - const cmd = ["polkadot", "--dev", "--ws-port", port.toString()]; + const cmd = ["polkadot", "--dev", "--ws-port", port.toString()] if (runtimeName !== "polkadot") { - cmd.push(`--force-${runtimeName}`); + cmd.push(`--force-${runtimeName}`) } try { // TODO: decide which specific logs to pipe to this file's process @@ -55,31 +55,31 @@ export function polkadotProcess(port: number, runtimeName: RuntimeName) { cmd, stdout: "piped", stderr: "piped", - }); + }) } catch (_e) { - throw new PolkadotBinNotFoundError(); + throw new PolkadotBinNotFoundError() } } export function getOpenPort(): number { - const tmp = Deno.listen({ port: 0 }); - const { port } = (tmp.addr as Deno.NetAddr); - tmp.close(); - return port; + const tmp = Deno.listen({ port: 0 }) + const { port } = (tmp.addr as Deno.NetAddr) + tmp.close() + return port } export async function portReady(port: number): Promise { - let attempts = 60; + let attempts = 60 while (--attempts) { try { - const connection = await Deno.connect({ port }); - connection.close(); - break; + const connection = await Deno.connect({ port }) + connection.close() + break } catch (e) { if (e instanceof Deno.errors.ConnectionRefused && attempts > 0) { - await new Promise((resolve) => setTimeout(resolve, 500)); + await new Promise((resolve) => setTimeout(resolve, 500)) } else { - throw new Error(); + throw new Error() } } } diff --git a/test_util/ctx.ts b/test_util/ctx.ts index 05289835e..ff6fd0f01 100644 --- a/test_util/ctx.ts +++ b/test_util/ctx.ts @@ -1,22 +1,22 @@ -import { parse } from "../deps/std/flags.ts"; -import { assert } from "../deps/std/testing/asserts.ts"; -import * as common from "./common.ts"; +import { parse } from "../deps/std/flags.ts" +import { assert } from "../deps/std/testing/asserts.ts" +import * as common from "./common.ts" -const { ["--"]: cmd } = parse(Deno.args, { "--": true }); -assert(cmd.length); +const { ["--"]: cmd } = parse(Deno.args, { "--": true }) +assert(cmd.length) -const processContainers: Partial> = {}; +const processContainers: Partial> = {} interface 
ProcessContainer { - port: number; - process: Deno.Process; + port: number + process: Deno.Process } const listener = Deno.listen({ transport: "tcp", port: 0, -}) as Deno.Listener; -const { hostname, port } = listener.addr as Deno.NetAddr; -useListener(listener); +}) as Deno.Listener +const { hostname, port } = listener.addr as Deno.NetAddr +useListener(listener) const cmdProcess = Deno.run({ cmd, @@ -24,43 +24,43 @@ const cmdProcess = Deno.run({ TEST_CTX_HOSTNAME: hostname, TEST_CTX_PORT: port.toString(), }, -}); +}) -const status = await cmdProcess.status(); -listener.close(); +const status = await cmdProcess.status() +listener.close() for (const { process } of Object.values(processContainers)) { - process.kill("SIGKILL"); - process.close(); + process.kill("SIGKILL") + process.close() } -Deno.exit(status.code); +Deno.exit(status.code) async function useListener(listener: Deno.Listener) { for await (const conn of listener) { - useConn(conn); + useConn(conn) } async function useConn(conn: Deno.Conn) { for await (const e of conn.readable) { - const e0 = e.at(0); + const e0 = e.at(0) if (typeof e0 !== "number") { - throw new Error(); + throw new Error() } - const runtimeName = (common.RUNTIME_NAMES as Record)[e0]!; - let processContainer = processContainers[runtimeName]; + const runtimeName = (common.RUNTIME_NAMES as Record)[e0]! + let processContainer = processContainers[runtimeName] if (!processContainer) { - const port = common.getOpenPort(); + const port = common.getOpenPort() processContainer = { port, process: common.polkadotProcess(port, runtimeName), - }; - processContainers[runtimeName] = processContainer; + } + processContainers[runtimeName] = processContainer } - const message = new Uint8Array(2); - new DataView(message.buffer).setUint16(0, processContainer.port); - (async () => { - await common.portReady(processContainer.port); - conn.write(message); - })(); + const message = new Uint8Array(2) + new DataView(message.buffer).setUint16(0, processContainer.port) + ;(async () => { + await common.portReady(processContainer.port) + conn.write(message) + })() } } } diff --git a/test_util/extrinsic.ts b/test_util/extrinsic.ts index d5ea649f9..de883b7fe 100644 --- a/test_util/extrinsic.ts +++ b/test_util/extrinsic.ts @@ -1,10 +1,10 @@ -import * as A from "../deps/std/testing/asserts.ts"; -import * as Z from "../deps/zones.ts"; -import { ExtrinsicProps, SignedExtrinsic } from "../effects/extrinsic.ts"; -import * as M from "../frame_metadata/mod.ts"; -import * as rpc from "../rpc/mod.ts"; +import * as A from "../deps/std/testing/asserts.ts" +import * as Z from "../deps/zones.ts" +import { ExtrinsicProps, SignedExtrinsic } from "../effects/extrinsic.ts" +import * as M from "../frame_metadata/mod.ts" +import * as rpc from "../rpc/mod.ts" -const k0_ = Symbol(); +const k0_ = Symbol() // TODO: use context / properly scope context / make it accessible outside of subscription lifecycle // TODO: better zones-level way to share context between effects @@ -13,15 +13,15 @@ export function collectExtrinsicEvents< Props extends Z.Rec$, Sign extends Z.$, >(extrinsic: SignedExtrinsic) { - const events: rpc.known.TransactionStatus[] = []; + const events: rpc.known.TransactionStatus[] = [] return extrinsic .watch(function(status) { - events.push(status); + events.push(status) if (rpc.known.TransactionStatus.isTerminal(status)) { - this.stop(); + this.stop() } }) - .next(() => events, k0_); + .next(() => events, k0_) } // TODO: is this a common-enough test to merit existence in `test_util`? 
@@ -30,14 +30,14 @@ export function assertStatusOrder( statuses: rpc.known.TransactionStatus[], statusOrderExpectation: StatusOrderExpectation, ) { - A.assertEquals(statuses.length, statusOrderExpectation.length); + A.assertEquals(statuses.length, statusOrderExpectation.length) for (let i = 0; i < statusOrderExpectation.length; i++) { - const expected = statusOrderExpectation[i]!; - const actualStatus = statuses[i]!; + const expected = statusOrderExpectation[i]! + const actualStatus = statuses[i]! if (typeof actualStatus === "string") { - A.assertEquals(actualStatus, expected); + A.assertEquals(actualStatus, expected) } else if (actualStatus.broadcast) { - A.assert(actualStatus[expected]); + A.assert(actualStatus[expected]) } } } @@ -55,4 +55,4 @@ export type StatusOrderExpectation = ( | "usurped" | "dropped" | "invalid" -)[]; +)[] diff --git a/test_util/local.ts b/test_util/local.ts index 8ae712990..950d9ce62 100644 --- a/test_util/local.ts +++ b/test_util/local.ts @@ -1,83 +1,83 @@ -import * as Z from "../deps/zones.ts"; -import * as C from "../mod.ts"; -import * as common from "./common.ts"; +import * as Z from "../deps/zones.ts" +import * as C from "../mod.ts" +import * as common from "./common.ts" -const hostname = Deno.env.get("TEST_CTX_HOSTNAME"); -const portRaw = Deno.env.get("TEST_CTX_PORT"); +const hostname = Deno.env.get("TEST_CTX_HOSTNAME") +const portRaw = Deno.env.get("TEST_CTX_PORT") class LocalClient extends C.rpc.Client { - url; + url constructor(port: number, close: () => void) { - const url = `ws://127.0.0.1:${port}`; - super(C.rpc.proxyProvider, url); - this.url = url; - const prevDiscard = this.discard; + const url = `ws://127.0.0.1:${port}` + super(C.rpc.proxyProvider, url) + this.url = url + const prevDiscard = this.discard this.discard = async () => { - const closeError = await prevDiscard(); - close(); - return closeError; - }; + const closeError = await prevDiscard() + close() + return closeError + } } } export class LocalClientEffect extends Z.Effect { - #clientPending?: Promise; + #clientPending?: Promise constructor(readonly runtime: common.RuntimeName) { - const getClientContainer: { getClient?: () => Promise } = {}; + const getClientContainer: { getClient?: () => Promise } = {} super({ kind: "LocalClient", impl: Z .call(async () => { try { - return await getClientContainer.getClient!(); + return await getClientContainer.getClient!() } catch (e) { - return e as common.PolkadotBinNotFoundError; + return e as common.PolkadotBinNotFoundError } }) .impl, items: [runtime], memoize: true, - }); - getClientContainer.getClient = this.createClient.bind(this); + }) + getClientContainer.getClient = this.createClient.bind(this) } get client(): Promise { if (!this.#clientPending) { - this.#clientPending = this.createClient(); + this.#clientPending = this.createClient() } - return this.#clientPending; + return this.#clientPending } get url(): Promise { - return this.client.then(({ url }) => url); + return this.client.then(({ url }) => url) } private async createClient(): Promise { - let port: number; - let close = () => {}; + let port: number + let close = () => {} if (portRaw /* in a test ctx */) { const conn = await Deno.connect({ hostname, port: parseInt(portRaw!), - }); - conn.write(new Uint8Array([common.RUNTIME_CODES[this.runtime]])); + }) + conn.write(new Uint8Array([common.RUNTIME_CODES[this.runtime]])) port = await (async () => { for await (const x of conn.readable) { - return new DataView(x.buffer).getUint16(0); + return new DataView(x.buffer).getUint16(0) } - 
return null!; - })(); + return null! + })() } else { - port = common.getOpenPort(); - const process = common.polkadotProcess(port, this.runtime); + port = common.getOpenPort() + const process = common.polkadotProcess(port, this.runtime) close = () => { - process.kill("SIGKILL"); - process.close(); - }; - await common.portReady(port); + process.kill("SIGKILL") + process.close() + } + await common.portReady(port) } - return new LocalClient(port, close); + return new LocalClient(port, close) } } diff --git a/test_util/mod.ts b/test_util/mod.ts index 3545944a0..080bf6201 100644 --- a/test_util/mod.ts +++ b/test_util/mod.ts @@ -1,11 +1,11 @@ -export * from "./clients/mod.ts"; +export * from "./clients/mod.ts" export { InvalidRuntimeSpecifiedError, isRuntimeName, PolkadotBinNotFoundError, polkadotProcess, type RuntimeName, -} from "./common.ts"; -export * as extrinsic from "./extrinsic.ts"; -export * from "./local.ts"; -export * from "./pairs.ts"; +} from "./common.ts" +export * as extrinsic from "./extrinsic.ts" +export * from "./local.ts" +export * from "./pairs.ts" diff --git a/test_util/pairs.ts b/test_util/pairs.ts index a78d024a8..faffa268a 100644 --- a/test_util/pairs.ts +++ b/test_util/pairs.ts @@ -1,29 +1,29 @@ -import { createTestPairs } from "../deps/polkadot/keyring.ts"; -import { KeyringPair } from "../deps/polkadot/keyring/types.ts"; -import { cryptoWaitReady } from "../deps/polkadot/util-crypto.ts"; -import { ArrayOfLength } from "../util/mod.ts"; +import { createTestPairs } from "../deps/polkadot/keyring.ts" +import { KeyringPair } from "../deps/polkadot/keyring/types.ts" +import { cryptoWaitReady } from "../deps/polkadot/util-crypto.ts" +import { ArrayOfLength } from "../util/mod.ts" -await cryptoWaitReady(); +await cryptoWaitReady() export interface Pairs { - all: ArrayOfLength; - alice: KeyringPair; - bob: KeyringPair; - charlie: KeyringPair; - dave: KeyringPair; - eve: KeyringPair; - ferdie: KeyringPair; + all: ArrayOfLength + alice: KeyringPair + bob: KeyringPair + charlie: KeyringPair + dave: KeyringPair + eve: KeyringPair + ferdie: KeyringPair } -export const { all: users, alice, bob, charlie, dave, eve, ferdie } = pairs(); +export const { all: users, alice, bob, charlie, dave, eve, ferdie } = pairs() export function pairs(...args: Parameters): Pairs { - const raw = createTestPairs(...args); - const alice = raw["alice"]!; - const bob = raw["bob"]!; - const charlie = raw["charlie"]!; - const dave = raw["dave"]!; - const eve = raw["eve"]!; - const ferdie = raw["ferdie"]!; + const raw = createTestPairs(...args) + const alice = raw["alice"]! + const bob = raw["bob"]! + const charlie = raw["charlie"]! + const dave = raw["dave"]! + const eve = raw["eve"]! + const ferdie = raw["ferdie"]! 
return { all: [alice, bob, charlie, dave, eve, ferdie], alice, @@ -32,5 +32,5 @@ export function pairs(...args: Parameters): Pairs { dave, eve, ferdie, - }; + } } diff --git a/util/Counter.ts b/util/Counter.ts index bfa570346..4778ee169 100644 --- a/util/Counter.ts +++ b/util/Counter.ts @@ -1,9 +1,9 @@ export class Counter { - i = 0; + i = 0 inc = () => { - const tmp = this.i; - this.i++; - return tmp; - }; + const tmp = this.i + this.i++ + return tmp + } } diff --git a/util/Listener.ts b/util/Listener.ts index 01b4722ee..f61004540 100644 --- a/util/Listener.ts +++ b/util/Listener.ts @@ -1,4 +1,4 @@ -export type Listener = (this: This, event: Event) => void; +export type Listener = (this: This, event: Event) => void export function contramapListener() { return ( @@ -6,7 +6,7 @@ export function contramapListener() { map: (this: This, message: From) => Into, ): Listener => { return function(e: From) { - return listener.apply(this, [map.apply(this, [e])]); - }; - }; + return listener.apply(this, [map.apply(this, [e])]) + } + } } diff --git a/util/branded.ts b/util/branded.ts index f63f8738b..11123260c 100644 --- a/util/branded.ts +++ b/util/branded.ts @@ -1,18 +1,18 @@ -import { HasherKind } from "../frame_metadata/Metadata.ts"; -export type Branded = T & { [_ in Brand]: V }; +import { HasherKind } from "../frame_metadata/Metadata.ts" +export type Branded = T & { [_ in Brand]: V } -declare const _hex: unique symbol; -export type Hex = Branded; +declare const _hex: unique symbol +export type Hex = Branded -declare const _encoded: unique symbol; -export type Encoded = Branded; +declare const _encoded: unique symbol +export type Encoded = Branded -declare const _hash: unique symbol; +declare const _hash: unique symbol export type Hash = Branded< Uint8Array, typeof _hash, [T, K] ->; +> -export type HexEncoded = Hex>; -export type HexHash = Hex>; +export type HexEncoded = Hex> +export type HexHash = Hex> diff --git a/util/error.ts b/util/error.ts index 5a2fced04..8e5457222 100644 --- a/util/error.ts +++ b/util/error.ts @@ -1,6 +1,6 @@ export function throwIfError(value: T): Exclude { if (value instanceof Error) { - throw value; + throw value } - return value as Exclude; + return value as Exclude } diff --git a/util/hex.ts b/util/hex.ts index 8a31182c4..485ae0aa5 100644 --- a/util/hex.ts +++ b/util/hex.ts @@ -1,25 +1,25 @@ -import { Hex } from "./branded.ts"; +import { Hex } from "./branded.ts" -export { decode as decodeBuf, encode as encodeBuf } from "../deps/std/encoding/hex.ts"; +export { decode as decodeBuf, encode as encodeBuf } from "../deps/std/encoding/hex.ts" export function decode(hex: string): Uint8Array { - if (hex.startsWith("0x")) hex = hex.slice(2); - if (hex.length % 2 === 1) hex = "0" + hex; - const array = new Uint8Array(hex.length / 2); + if (hex.startsWith("0x")) hex = hex.slice(2) + if (hex.length % 2 === 1) hex = "0" + hex + const array = new Uint8Array(hex.length / 2) for (let i = 0; i < hex.length; i++) { - array[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16); + array[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16) } - return array; + return array } export function encode(bytes: Uint8Array): Hex { - let str = ""; + let str = "" for (let i = 0; i < bytes.length; i++) { - str += bytes[i]!.toString(16).padStart(2, "0"); + str += bytes[i]!.toString(16).padStart(2, "0") } - return str as Hex; + return str as Hex } export function encodePrefixed(bytes: Uint8Array): Hex { - return ("0x" + encode(bytes)) as Hex; + return ("0x" + encode(bytes)) as Hex } diff --git a/util/map.ts 
b/util/map.ts index 655e2b26c..bb124a2d2 100644 --- a/util/map.ts +++ b/util/map.ts @@ -1,7 +1,7 @@ export interface MapLike { - set(key: K, value: V): void; - get(key: K): undefined | V; - delete(key: K): boolean; + set(key: K, value: V): void + get(key: K): undefined | V + delete(key: K): boolean } export function getOrInit( @@ -9,10 +9,10 @@ export function getOrInit( key: K, init: () => V, ): V { - let value = container.get(key); + let value = container.get(key) if (value === undefined) { - value = init(); - container.set(key, value); + value = init() + container.set(key, value) } - return value; + return value } diff --git a/util/mod.ts b/util/mod.ts index 814782f2d..fc33492c0 100644 --- a/util/mod.ts +++ b/util/mod.ts @@ -1,8 +1,8 @@ -export * from "./branded.ts"; -export * from "./Counter.ts"; -export * from "./error.ts"; -export * as hex from "./hex.ts"; -export * from "./Listener.ts"; -export * from "./map.ts"; -export * from "./tuple.ts"; -export * from "./types.ts"; +export * from "./branded.ts" +export * from "./Counter.ts" +export * from "./error.ts" +export * as hex from "./hex.ts" +export * from "./Listener.ts" +export * from "./map.ts" +export * from "./tuple.ts" +export * from "./types.ts" diff --git a/util/tuple.ts b/util/tuple.ts index cb08c135f..f16711237 100644 --- a/util/tuple.ts +++ b/util/tuple.ts @@ -1,7 +1,7 @@ export function tuple(...elements: [...Elements]): Elements { - return elements; + return elements } export type ArrayOfLength = number extends L ? T[] : L extends A["length"] ? A - : ArrayOfLength; + : ArrayOfLength diff --git a/util/types.ts b/util/types.ts index f2fee1bb4..2301aacae 100644 --- a/util/types.ts +++ b/util/types.ts @@ -1,4 +1,4 @@ -export type ValueOf = T[keyof T]; +export type ValueOf = T[keyof T] export type U2I = (T extends any ? (x: T) => any : never) extends (x: infer R) => any ? R - : never; + : never diff --git a/words.txt b/words.txt index 48c60e8bd..fdeb7551e 100644 --- a/words.txt +++ b/words.txt @@ -202,3 +202,4 @@ prevotes precommits inherents bootnode +nocompile
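
A note on the statement-initial IIFEs touched above: in `rpc/provider/smoldot.ts` and `test_util/ctx.ts` the patch rewrites `(async () => { ... })();` as `;(async () => { ... })()`. Once trailing semicolons are removed, a line that begins with `(` would otherwise be joined to the preceding expression by automatic semicolon insertion, so the leading `;` acts as a guard. The sketch below is illustrative only (not project code) and shows the hazard the guard prevents:

```ts
// Minimal ASI-guard illustration, assuming a semicolon-free style like this patch.

const label = "ready"

// Without the leading `;`, ASI would merge the previous line and this
// parenthesized expression into one statement:
//   const label = "ready"(async () => { ... })()
// i.e. an attempt to call a string — a type error in TypeScript and a
// TypeError at runtime. The `;` keeps this line a separate statement.
;(async () => {
  console.log(label)
})()
```

The same guard is needed for any statement that starts with `(`, `[`, or a template literal when the preceding line has no terminating semicolon.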