# Validation + parsing
Parses JSON strings, validates them against a Zod schema, and returns typed results — either on the main thread or through a Knitting worker pool. If your app already does JSON.parse + validation and you want to see what offloading looks like, start here.
## How it works

The host generates JSON strings (some valid, some intentionally broken). Each job parses the string, runs it through `UserSchema.safeParse`, and returns `{ ok: true, value }` or `{ ok: false, issues }`. The host aggregates the counts and prints a few sample failures.
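For orientation, here is a condensed sketch of the two call sites. It is not a fourth file, just the pool API from the full listings below applied to a single payload:

```ts
import { createPool } from "@vixeny/knitting";
import { parseAndValidate, parseAndValidateHost } from "./utils.ts";

const payload = JSON.stringify({
  id: "u_42",
  email: "ari@knitting.dev",
  displayName: "Ari Lane",
  age: 29,
  roles: ["admin"],
  marketingOptIn: true,
});

// Host mode: a plain function call on the main thread.
const hostResult = parseAndValidateHost(payload);

// Knitting mode: the same function exported as a task and dispatched to a worker.
const pool = createPool({ threads: 2 })({ parseAndValidate });
const workerResult = await pool.call.parseAndValidate(payload);
pool.shutdown();
```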
Three files:

- `schema_knitting.ts` — runs parse + validate in host and Knitting modes
- `utils.ts` — schema logic, payload builders, task exports
- `bench_schema_validate.ts` — host-vs-worker benchmark with `mitata`
## Example payloads

A valid payload:

```json
{
  "id": "u_42",
  "email": "ari@knitting.dev",
  "displayName": "Ari Lane",
  "age": 29,
  "roles": ["admin"],
  "marketingOptIn": true
}
```

An invalid payload:

```json
{
  "id": "u_42",
  "email": "ari@knitting.dev",
  "displayName": "x",
  "age": "unknown",
  "roles": ["owner"]
}
```

The first returns `{ ok: true, value }`. The second returns `{ ok: false, issues }`, with messages like `displayName: String must contain at least 2 character(s)` or `age: Expected number, received string`.
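To poke at the helper directly, something along these lines (run next to `utils.ts`) shows both result shapes; the exact issue strings depend on your Zod version:

```ts
import { parseAndValidateHost } from "./utils.ts";

const valid = parseAndValidateHost(
  '{"id":"u_42","email":"ari@knitting.dev","displayName":"Ari Lane","age":29,"roles":["admin"],"marketingOptIn":true}',
);
// -> { ok: true, value: <typed User> }

const invalid = parseAndValidateHost(
  '{"id":"u_42","email":"ari@knitting.dev","displayName":"x","age":"unknown","roles":["owner"]}',
);
// -> { ok: false, issues: [...] }, one "path: message" string per failed field
if (!invalid.ok) console.log(invalid.issues);
```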
## Install

```sh
# deno
deno add --npm jsr:@vixeny/knitting
deno add npm:zod npm:mitata

# npm
npx jsr add @vixeny/knitting
npm i zod mitata

# pnpm 10.9+
pnpm add jsr:@vixeny/knitting
# fallback (older pnpm)
pnpm dlx jsr add @vixeny/knitting
pnpm add zod mitata

# yarn 4.9+
yarn add jsr:@vixeny/knitting
# fallback (older yarn)
yarn dlx jsr add @vixeny/knitting
yarn add zod mitata

# bun
bunx jsr add @vixeny/knitting
bun add zod mitata
```

Run the example with the runtime of your choice:

```sh
bun src/schema_knitting.ts
deno run -A src/schema_knitting.ts
npx tsx src/schema_knitting.ts
```

You should see output like:
```
-- host mode --
valid: 800
invalid: 200
sample issue: ["Expected string, received number"]

-- knitting mode (2 threads) --
valid: 800
invalid: 200
sample issue: ["Expected string, received number"]
```

## Optional benchmark
The benchmark compares JSON.parse + safeParse via direct function imports (host) against the same logic dispatched through a worker pool (knitting). Batch calls keep per-dispatch overhead predictable.
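Concretely, each worker dispatch carries a batch of strings and returns only a small `{ valid, invalid }` summary, so the dispatch cost is paid once per 64 payloads rather than once per payload. A minimal sketch of that shape, reusing the batch task from `utils.ts`:

```ts
import { createPool } from "@vixeny/knitting";
import {
  buildPayloads,
  makeBatches,
  parseAndValidateBatchFast,
} from "./utils.ts";

const pool = createPool({ threads: 2 })({ parseAndValidateBatchFast });
const batches = makeBatches(buildPayloads(20_000, 15), 64);

// One worker round-trip per 64 payloads instead of one per payload.
const summaries = await Promise.all(
  batches.map((batch) => pool.call.parseAndValidateBatchFast(batch)),
);

pool.shutdown();
```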
Expected output:
```
benchmark      avg (ns)         min ... max (ns)
host             12,340      11,200 ... 18,400
knitting          6,890       6,100 ... 11,200
```

The three files in full.

`src/schema_knitting.ts`:

```ts
import { createPool, isMain } from "@vixeny/knitting";
import {
  buildPayloads,
  parseAndValidate,
  parseAndValidateHost,
  type ParseValidateResult,
} from "./utils.ts";

const THREADS = 2;
const REQUESTS = 20_000;
const INVALID_PERCENT = 15;

type Summary = {
  valid: number;
  invalid: number;
  sampleIssues: string[];
};

function summarize(results: ParseValidateResult[]): Summary {
  let valid = 0;
  let invalid = 0;
  const sampleIssues: string[] = [];

  for (let i = 0; i < results.length; i++) {
    const result = results[i]!;
    if (result.ok) {
      valid++;
      continue;
    }

    invalid++;
    // Keep at most three example issue messages for the printed summary.
    if (sampleIssues.length < 3 && result.issues.length > 0) {
      sampleIssues.push(result.issues[0]!);
    }
  }

  return { valid, invalid, sampleIssues };
}

function runHost(payloads: string[]): Summary {
  const results = payloads.map((payload) => parseAndValidateHost(payload));
  return summarize(results);
}

async function runWorkers(payloads: string[]): Promise<Summary> {
  const pool = createPool({ threads: THREADS })({ parseAndValidate });

  try {
    // One worker job per payload.
    const jobs: Promise<ParseValidateResult>[] = [];
    for (let i = 0; i < payloads.length; i++) {
      jobs.push(pool.call.parseAndValidate(payloads[i]!));
    }

    const results = await Promise.all(jobs);
    return summarize(results);
  } finally {
    pool.shutdown();
  }
}

function printSummary(mode: string, summary: Summary, ms: number): void {
  const secs = Math.max(1e-9, ms / 1000);
  const rps = REQUESTS / secs;

  console.log(mode);
  console.log("requests    :", REQUESTS.toLocaleString());
  console.log("invalidRate :", `${INVALID_PERCENT}%`);
  console.log("valid       :", summary.valid.toLocaleString());
  console.log("invalid     :", summary.invalid.toLocaleString());
  console.log("took        :", `${ms.toFixed(2)} ms`);
  console.log("throughput  :", `${rps.toFixed(0)} req/s`);

  if (summary.sampleIssues.length > 0) {
    console.log("sampleIssues:", summary.sampleIssues.join(" | "));
  }
}

async function main() {
  const payloads = buildPayloads(REQUESTS, INVALID_PERCENT);

  const hostStart = performance.now();
  const hostSummary = runHost(payloads);
  const hostMs = performance.now() - hostStart;

  const workerStart = performance.now();
  const workerSummary = await runWorkers(payloads);
  const workerMs = performance.now() - workerStart;

  const uplift = (hostMs / Math.max(1e-9, workerMs) - 1) * 100;

  console.log("JSON parse + schema validation");
  console.log(`threads: ${THREADS}`);
  console.log("");
  printSummary("host", hostSummary, hostMs);
  console.log("");
  printSummary("knitting", workerSummary, workerMs);
  console.log("");
  console.log(`uplift: ${uplift.toFixed(1)}%`);
}

if (isMain) {
  main().catch((error) => {
    console.error(error);
    process.exitCode = 1;
  });
}
```
`src/bench_schema_validate.ts`:

```ts
import { createPool, isMain } from "@vixeny/knitting";
import { bench, boxplot, run, summary } from "mitata";
import {
  buildPayloads,
  makeBatches,
  mergeValidationSummary,
  parseAndValidateBatchFast,
  parseAndValidateBatchFastHost,
  sameValidationSummary,
  type ValidationSummary,
} from "./utils.ts";

const THREADS = 2;
const REQUESTS = 20_000;
const INVALID_PERCENT = 15;
const BATCH = 64;

async function main() {
  const payloads = buildPayloads(REQUESTS, INVALID_PERCENT);
  const payloadBatches = makeBatches(payloads, BATCH);

  const pool = createPool({
    threads: THREADS,
  })({ parseAndValidateBatchFast });
  let sink = 0;

  try {
    // Sanity check: host and worker paths must agree before benchmarking.
    const hostCheck = runHostBatches(payloadBatches);
    const workerCheck = await runWorkerBatches(
      pool.call.parseAndValidateBatchFast,
      payloadBatches,
    );

    if (!sameValidationSummary(hostCheck, workerCheck)) {
      throw new Error("Host and worker validation counts differ.");
    }

    console.log("Schema validation benchmark (mitata)");
    console.log("workload: JSON.parse + UserSchema.safeParse");
    console.log("requests per iteration:", REQUESTS.toLocaleString());
    console.log("invalid rate:", `${INVALID_PERCENT}%`);
    console.log("batch size:", BATCH);
    console.log("threads:", THREADS);

    boxplot(() => {
      summary(() => {
        bench(`host (${REQUESTS.toLocaleString()} req, batch ${BATCH})`, () => {
          const totals = runHostBatches(payloadBatches);
          sink = totals.valid;
        });

        bench(
          `knitting (${THREADS} thread${
            THREADS === 1 ? "" : "s"
          }, ${REQUESTS.toLocaleString()} req, batch ${BATCH})`,
          async () => {
            const totals = await runWorkerBatches(
              pool.call.parseAndValidateBatchFast,
              payloadBatches,
            );
            sink = totals.valid;
          },
        );
      });
    });

    await run();
    // Use the sink so the benchmarked work cannot be optimized away.
    console.log("last valid count:", sink.toLocaleString());
  } finally {
    pool.shutdown();
  }
}

function runHostBatches(payloadBatches: string[][]): ValidationSummary {
  let totals: ValidationSummary = { valid: 0, invalid: 0 };

  for (let i = 0; i < payloadBatches.length; i++) {
    totals = mergeValidationSummary(
      totals,
      parseAndValidateBatchFastHost(payloadBatches[i]!),
    );
  }

  return totals;
}

async function runWorkerBatches(
  callBatch: (payloads: string[]) => Promise<ValidationSummary>,
  payloadBatches: string[][],
): Promise<ValidationSummary> {
  const jobs: Promise<ValidationSummary>[] = [];
  for (let i = 0; i < payloadBatches.length; i++) {
    jobs.push(callBatch(payloadBatches[i]!));
  }

  const results = await Promise.all(jobs);

  let totals: ValidationSummary = { valid: 0, invalid: 0 };
  for (let i = 0; i < results.length; i++) {
    totals = mergeValidationSummary(totals, results[i]!);
  }
  return totals;
}

if (isMain) {
  main().catch((error) => {
    console.error(error);
    process.exitCode = 1;
  });
}
```
`src/utils.ts`:

```ts
import { task } from "@vixeny/knitting";
import { z } from "zod";

const UserSchema = z.object({
  id: z.string().min(1),
  email: z.string().email(),
  displayName: z.string().min(2).max(80),
  age: z.number().int().min(13).max(120),
  roles: z.array(z.enum(["user", "admin", "moderator"])).default(["user"]),
  marketingOptIn: z.boolean().default(false),
});

export type User = z.infer<typeof UserSchema>;

export type ParseValidateResult =
  | { ok: true; value: User }
  | { ok: false; issues: string[] };

export type ValidationSummary = {
  valid: number;
  invalid: number;
};

export function makeValidPayload(i: number): string {
  const short = i.toString(36);
  const role = i % 9 === 0 ? "admin" : "user";

  return JSON.stringify({
    id: `u_${short}`,
    email: `${short}@knitting.dev`,
    displayName: `User ${short.toUpperCase()}`,
    age: 18 + (i % 60),
    roles: [role],
    marketingOptIn: i % 2 === 0,
  });
}

export function makePayload(i: number, invalidPercent: number): string {
  if (i % 100 >= invalidPercent) return makeValidPayload(i);

  // Invalid payloads cycle through four failure modes:
  // broken JSON, a missing field, wrong types, and an unknown role.
  switch (i % 4) {
    case 0:
      return '{"id":"broken"';
    case 1:
      return JSON.stringify({
        id: `u_${i}`,
        displayName: `User ${i}`,
        age: 33,
        roles: ["user"],
        marketingOptIn: true,
      });
    case 2:
      return JSON.stringify({
        id: `u_${i}`,
        email: `u_${i}@knitting.dev`,
        displayName: "x",
        age: "unknown",
        roles: ["user"],
      });
    default:
      return JSON.stringify({
        id: `u_${i}`,
        email: `u_${i}@knitting.dev`,
        displayName: `User ${i}`,
        age: 31,
        roles: ["owner"],
      });
  }
}

export function buildPayloads(count: number, invalidPercent: number): string[] {
  const cappedInvalid = Math.max(0, Math.min(95, Math.floor(invalidPercent)));
  const size = Math.max(0, Math.floor(count));
  const payloads = new Array<string>(size);
  for (let i = 0; i < size; i++) payloads[i] = makePayload(i, cappedInvalid);
  return payloads;
}

export function makeBatches<T>(values: T[], batchSize: number): T[][] {
  const size = Math.max(1, Math.floor(batchSize));
  const batches: T[][] = [];
  for (let i = 0; i < values.length; i += size) {
    batches.push(values.slice(i, i + size));
  }
  return batches;
}

export function mergeValidationSummary(
  a: ValidationSummary,
  b: ValidationSummary,
): ValidationSummary {
  return {
    valid: a.valid + b.valid,
    invalid: a.invalid + b.invalid,
  };
}

export function sameValidationSummary(
  a: ValidationSummary,
  b: ValidationSummary,
): boolean {
  return a.valid === b.valid && a.invalid === b.invalid;
}

function toIssues(error: z.ZodError): string[] {
  return error.issues.map((issue) => {
    const path = issue.path.length > 0 ? issue.path.join(".") : "payload";
    return `${path}: ${issue.message}`;
  });
}

export function parseAndValidateHost(rawPayload: string): ParseValidateResult {
  let parsed: unknown;
  try {
    parsed = JSON.parse(rawPayload) as unknown;
  } catch {
    return { ok: false, issues: ["payload: invalid JSON string"] };
  }

  const result = UserSchema.safeParse(parsed);
  if (!result.success) {
    return { ok: false, issues: toIssues(result.error) };
  }

  return { ok: true, value: result.data };
}

export const parseAndValidate = task<string, ParseValidateResult>({
  f: parseAndValidateHost,
});

export function parseAndValidateFastHost(rawPayload: string): boolean {
  let parsed: unknown;
  try {
    parsed = JSON.parse(rawPayload) as unknown;
  } catch {
    return false;
  }

  return UserSchema.safeParse(parsed).success;
}

export function parseAndValidateBatchFastHost(
  rawPayloads: string[],
): ValidationSummary {
  let valid = 0;
  let invalid = 0;

  for (let i = 0; i < rawPayloads.length; i++) {
    if (parseAndValidateFastHost(rawPayloads[i]!)) {
      valid++;
    } else {
      invalid++;
    }
  }

  return { valid, invalid };
}

export const parseAndValidateBatchFast = task<string[], ValidationSummary>({
  f: parseAndValidateBatchFastHost,
});
```

## When to use this pattern
Schema validation is a textbook case for worker offloading: each call is independent, the input/output is small, and Zod’s internals are CPU-bound (type checking, error formatting). If you’re validating hundreds of payloads per second — API gateway, webhook ingestion, form processing — batching them through a pool can free your main thread without changing any validation logic.
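As a rough illustration of that setting, a long-lived pool could sit behind a fetch-style request handler; `handleWebhook` and the server wiring are hypothetical, only the pool calls mirror the listings above:

```ts
import { createPool } from "@vixeny/knitting";
import { parseAndValidate } from "./utils.ts";

// Create the pool once at startup and keep it for the life of the process.
const pool = createPool({ threads: 2 })({ parseAndValidate });

// Hypothetical handler; Deno.serve and Bun.serve both accept this shape.
export async function handleWebhook(req: Request): Promise<Response> {
  const raw = await req.text();
  const result = await pool.call.parseAndValidate(raw);

  if (!result.ok) {
    return Response.json({ errors: result.issues }, { status: 400 });
  }

  // result.value is a typed User; hand it to the rest of the app here.
  return Response.json({ accepted: result.value.id }, { status: 202 });
}
```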