Salting + hashing
Derives password hashes with PBKDF2-SHA256 and verifies them with constant-time comparison. This is the heaviest per-call workload in the validation set — each hash derivation is genuinely CPU-expensive by design (that’s the point of PBKDF2).
How it works
Section titled “How it works”- Generate a random salt per password.
- Derive a hash using
PBKDF2-SHA256 via Web Crypto. - Store as a compact record:
algorithm$iterations$keyBytes$salt$hash. - Verify login attempts by recomputing and constant-time comparing.
The benchmark uses Uint8Array payloads to reduce serialization noise in hot loops.
Three files:
salt_knitting.ts — runs salting + verification in host and Knitting modes; utils.ts — hashing, verification, and fast-path packet functions; bench_salt_hashing.ts — host-vs-worker benchmark with mitata
Example record shape
Section titled “Example record shape”The stored credential format is intentionally compact:
pbkdf2-sha256$600000$32$<salt-base64>$<hash-base64> (salt and hash are standard base64, as produced by btoa). At a high level the flow is:
password -> derive PBKDF2 hash with random salt -> store compact recordlogin attempt -> derive again with stored salt -> constant-time compareThat makes the worker boundary simple: small input, small output, expensive CPU work in the middle.
Install
Section titled “Install”deno add --npm jsr:@vixeny/knittingdeno add npm:mitatanpx jsr add @vixeny/knittingnpm i mitata# pnpm 10.9+pnpm add jsr:@vixeny/knitting
# fallback (older pnpm)pnpm dlx jsr add @vixeny/knitting
pnpm add mitata# yarn 4.9+yarn add jsr:@vixeny/knitting
# fallback (older yarn)yarn dlx jsr add @vixeny/knitting
yarn add mitatabunx jsr add @vixeny/knittingbun add mitataUses built-in Web Crypto APIs — no extra crypto package required.
bun src/salt_knitting.tsdeno run -A src/salt_knitting.tsnpx tsx src/salt_knitting.tsExpected output:
-- host mode -- hashed: 100 verified: 100 mismatches: 0
-- knitting mode (2 threads) -- hashed: 100 verified: 100 mismatches: 0Optional benchmark
Section titled “Optional benchmark”Compares hashing typed-array packets through direct imports (host) vs worker task calls (knitting). Because PBKDF2 is intentionally slow (high iteration count), this is where workers shine most — each call does enough real work to easily justify dispatch overhead.
import { createPool, isMain } from "@vixeny/knitting";import { decodeHashResultPacket, hashPassword, hashPasswordHost, hashPasswordPacketHost, makeHashPacketForIndex, verifyPassword, verifyPasswordHost,} from "./utils.ts";
const THREADS = 2;const REQUESTS = 2_000;const ITERATIONS = 120_000;const MISMATCH_PERCENT = 5;
type Summary = { hashed: number; verified: number; mismatched: number;};
function passwordFor(i: number): string { return `user-${i.toString(36)}-password`;}
function expectedPassword(i: number): string { if (i % 100 < MISMATCH_PERCENT) return `wrong-${i.toString(36)}-password`; return passwordFor(i);}
/**
 * Run the whole workload on the host thread: hash each demo password,
 * then verify the (sometimes deliberately wrong) login attempt against
 * the freshly produced record.
 */
async function runHost(): Promise<Summary> {
  let verified = 0;

  for (let i = 0; i < REQUESTS; i++) {
    const password = passwordFor(i);
    const hashed = await hashPasswordHost({ password, iterations: ITERATIONS });
    const checked = await verifyPasswordHost({
      password: expectedPassword(i),
      record: hashed.record,
    });
    if (checked.ok) verified += 1;
  }

  return { hashed: REQUESTS, verified, mismatched: REQUESTS - verified };
}
/**
 * Same workload dispatched through a Knitting worker pool: all hash jobs
 * are queued first, then all verify jobs, so both threads stay busy. The
 * pool is always shut down, even if a job rejects.
 */
async function runWorkers(): Promise<Summary> {
  const pool = createPool({ threads: THREADS })({
    hashPassword,
    verifyPassword,
  });

  try {
    const hashes = await Promise.all(
      Array.from({ length: REQUESTS }, (_, i) =>
        pool.call.hashPassword({
          password: passwordFor(i),
          iterations: ITERATIONS,
        })
      ),
    );

    const checks = await Promise.all(
      hashes.map((hashed, i) =>
        pool.call.verifyPassword({
          password: expectedPassword(i),
          record: hashed.record,
        })
      ),
    );

    const verified = checks.filter((check) => check.ok).length;
    return { hashed: REQUESTS, verified, mismatched: REQUESTS - verified };
  } finally {
    pool.shutdown();
  }
}
function printSummary(mode: string, summary: Summary, ms: number): void { const seconds = Math.max(1e-9, ms / 1000); const ops = REQUESTS / seconds; console.log(mode); console.log("requests :", REQUESTS.toLocaleString()); console.log("iterations :", ITERATIONS.toLocaleString()); console.log("mismatch rate :", `${MISMATCH_PERCENT}%`); console.log("hashed :", summary.hashed.toLocaleString()); console.log("verified :", summary.verified.toLocaleString()); console.log("mismatched :", summary.mismatched.toLocaleString()); console.log("took :", `${ms.toFixed(2)} ms`); console.log("throughput :", `${ops.toFixed(0)} req/s`);}
async function printPacketSample() { const packet = makeHashPacketForIndex(7, ITERATIONS, 32, 16); const result = await hashPasswordPacketHost(packet); const decoded = decodeHashResultPacket(result); console.log("packet sample : iterations", decoded.iterations); console.log("salt(base64) :", decoded.saltBase64); console.log("hash(base64) :", decoded.hashBase64);}
async function main() { const hostStart = performance.now(); const host = await runHost(); const hostMs = performance.now() - hostStart;
const workerStart = performance.now(); const knitting = await runWorkers(); const workerMs = performance.now() - workerStart;
const uplift = (hostMs / Math.max(1e-9, workerMs) - 1) * 100;
console.log("Password salting + hashing"); console.log(`threads: ${THREADS}`); console.log(""); printSummary("host", host, hostMs); console.log(""); printSummary("knitting", knitting, workerMs); console.log(""); console.log(`uplift: ${uplift.toFixed(1)}%`); await printPacketSample();}
if (isMain) { main().catch((error) => { console.error(error); process.exitCode = 1; });}import { createPool, isMain } from "@vixeny/knitting";import { bench, boxplot, run, summary } from "mitata";import { buildDemoHashPackets, type HashBatchSummary, hashPasswordPacketBatchFast, hashPasswordPacketBatchFastHost,} from "./utils.ts";
const THREADS = 2;const REQUESTS = 500;const BATCH = 32;const ITERATIONS = 1_200;const KEY_BYTES = 32;const SALT_BYTES = 16;
// Benchmark entry point: build a deterministic packet workload, check
// that the host and worker paths produce identical summaries, then run
// a mitata boxplot comparison of the two paths.
async function main() {
  const packets = buildDemoHashPackets({
    count: REQUESTS,
    iterations: ITERATIONS,
    keyBytes: KEY_BYTES,
    saltBytes: SALT_BYTES,
  });
  const batches = makeBatches(packets, BATCH);

  const pool = createPool({ threads: THREADS })({
    hashPasswordPacketBatchFast,
  });
  // `sink` absorbs each run's checksum so the benchmarked work cannot be
  // dead-code-eliminated; it is printed once at the end.
  let sink = 0;

  try {
    // Sanity check before benchmarking: both paths must agree on the
    // same input, otherwise the timing comparison is meaningless.
    const hostCheck = await runHostBatches(batches);
    const workerCheck = await runWorkerBatches(
      pool.call.hashPasswordPacketBatchFast,
      batches,
    );

    if (!same(hostCheck, workerCheck)) {
      throw new Error("Host and worker hashing summaries differ.");
    }

    console.log("Salt hashing benchmark (mitata)");
    console.log("workload: PBKDF2-SHA256 on Uint8Array request packets");
    console.log("requests per iteration:", REQUESTS.toLocaleString());
    console.log("iterations:", ITERATIONS.toLocaleString());
    console.log("batch size:", BATCH);
    console.log("threads:", THREADS);

    // boxplot/summary group the two bench cases for a direct comparison.
    boxplot(() => {
      summary(() => {
        bench(
          `host (${REQUESTS.toLocaleString()} req, batch ${BATCH})`,
          async () => {
            const totals = await runHostBatches(batches);
            sink = totals.outputBytes ^ totals.digestXor;
          },
        );

        bench(
          `knitting (${THREADS} thread${
            THREADS === 1 ? "" : "s"
          }, ${REQUESTS.toLocaleString()} req, batch ${BATCH})`,
          async () => {
            const totals = await runWorkerBatches(
              pool.call.hashPasswordPacketBatchFast,
              batches,
            );
            sink = totals.outputBytes ^ totals.digestXor;
          },
        );
      });
    });

    await run();
    console.log("last sink:", sink.toLocaleString());
  } finally {
    // Always release worker threads, even if the benchmark throws.
    pool.shutdown();
  }
}
function makeBatches(packets: Uint8Array[], batchSize: number): Uint8Array[][] { const out: Uint8Array[][] = []; for (let i = 0; i < packets.length; i += batchSize) { out.push(packets.slice(i, i + batchSize)); } return out;}
function merge(a: HashBatchSummary, b: HashBatchSummary): HashBatchSummary { return { count: a.count + b.count, outputBytes: a.outputBytes + b.outputBytes, digestXor: a.digestXor ^ b.digestXor, };}
async function runHostBatches( batches: Uint8Array[][],): Promise<HashBatchSummary> { let totals: HashBatchSummary = { count: 0, outputBytes: 0, digestXor: 0 }; for (let i = 0; i < batches.length; i++) { totals = merge(totals, await hashPasswordPacketBatchFastHost(batches[i]!)); } return totals;}
/**
 * Dispatch every batch to the worker pool at once so the threads stay
 * saturated, then fold the results in submission order.
 */
async function runWorkerBatches(
  callBatch: (packets: Uint8Array[]) => Promise<HashBatchSummary>,
  batches: Uint8Array[][],
): Promise<HashBatchSummary> {
  const results = await Promise.all(batches.map((batch) => callBatch(batch)));
  const zero: HashBatchSummary = { count: 0, outputBytes: 0, digestXor: 0 };
  return results.reduce((totals, partial) => merge(totals, partial), zero);
}
function same(a: HashBatchSummary, b: HashBatchSummary): boolean { return a.count === b.count && a.outputBytes === b.outputBytes && a.digestXor === b.digestXor;}
if (isMain) { main().catch((error) => { console.error(error); process.exitCode = 1; });}import { task } from "@vixeny/knitting";
const encoder = new TextEncoder();const decoder = new TextDecoder();
const DEFAULT_ITERATIONS = 120_000;const DEFAULT_KEY_BYTES = 32;const DEFAULT_SALT_BYTES = 16;const MIN_ITERATIONS = 10_000;const MAX_ITERATIONS = 2_000_000;const MIN_KEY_BYTES = 16;const MAX_KEY_BYTES = 64;const MIN_SALT_BYTES = 8;const MAX_SALT_BYTES = 32;
// Input for hashing a password. Optional fields fall back to policy defaults.
export type HashRequest = {
  password: string;
  iterations?: number;
  keyBytes?: number;
  saltBase64?: string; // supply a fixed salt (base64) for deterministic output
};

// Result of hashing: the compact `$`-joined record plus its parts.
export type HashResponse = {
  record: string;
  algorithm: "pbkdf2-sha256";
  iterations: number;
  keyBytes: number;
  saltBase64: string;
  hashBase64: string;
};

// Input for verifying a login attempt against a stored record.
export type VerifyRequest = {
  password: string;
  record: string;
};

// Verification outcome; `reason` is set only when `ok` is false.
export type VerifyResponse = {
  ok: boolean;
  reason?: string;
};

// Aggregate over a batch of packet hashes (used by the benchmark).
export type HashBatchSummary = {
  count: number;
  outputBytes: number;
  digestXor: number;
};

// Options for building a deterministic demo workload.
export type DemoPacketOptions = {
  count: number;
  iterations?: number;
  keyBytes?: number;
  saltBytes?: number;
};

// A stored record decoded back into binary parts.
type ParsedRecord = {
  algorithm: "pbkdf2-sha256";
  iterations: number;
  keyBytes: number;
  salt: Uint8Array;
  hash: Uint8Array;
};
function clampInt( value: unknown, fallback: number, min: number, max: number,): number { const numeric = Number(value); if (!Number.isFinite(numeric)) return fallback; const integer = Math.floor(numeric); if (integer < min) return min; if (integer > max) return max; return integer;}
function bytesToBase64(bytes: Uint8Array): string { let raw = ""; for (let i = 0; i < bytes.length; i++) raw += String.fromCharCode(bytes[i]!); return btoa(raw);}
function base64ToBytes(value: string): Uint8Array | null { try { const raw = atob(value); const bytes = new Uint8Array(raw.length); for (let i = 0; i < raw.length; i++) bytes[i] = raw.charCodeAt(i); return bytes; } catch { return null; }}
function fixedTimeEqual(a: Uint8Array, b: Uint8Array): boolean { if (a.length !== b.length) return false; let diff = 0; for (let i = 0; i < a.length; i++) diff |= a[i]! ^ b[i]!; return diff === 0;}
function assertPassword(password: string): string { if (typeof password !== "string" || password.length < 8) { throw new Error("password must be at least 8 characters"); } return password;}
function normalizeSalt( saltBase64: string | undefined, saltBytes: number,): Uint8Array { if (!saltBase64) return crypto.getRandomValues(new Uint8Array(saltBytes)); const salt = base64ToBytes(saltBase64); if (!salt) throw new Error("saltBase64 is not valid base64"); if (salt.length < MIN_SALT_BYTES || salt.length > MAX_SALT_BYTES) { throw new Error( `salt length must be ${MIN_SALT_BYTES}-${MAX_SALT_BYTES} bytes`, ); } return salt;}
/**
 * Derive `keyBytes` bytes of PBKDF2-SHA256 output via Web Crypto.
 * This is the deliberately expensive step of the whole module.
 */
async function derivePbkdf2(
  passwordBytes: Uint8Array,
  saltBytes: Uint8Array,
  iterations: number,
  keyBytes: number,
): Promise<Uint8Array> {
  const baseKey = await crypto.subtle.importKey(
    "raw",
    passwordBytes,
    "PBKDF2",
    false,
    ["deriveBits"],
  );
  const params = {
    name: "PBKDF2",
    hash: "SHA-256",
    salt: saltBytes,
    iterations,
  };
  const derived = await crypto.subtle.deriveBits(params, baseKey, keyBytes * 8);
  return new Uint8Array(derived);
}
function makeRecord( iterations: number, keyBytes: number, salt: Uint8Array, hash: Uint8Array,): string { return [ "pbkdf2-sha256", String(iterations), String(keyBytes), bytesToBase64(salt), bytesToBase64(hash), ].join("$");}
/**
 * Parse and validate a stored credential record. Returns null (never
 * throws) on any structural or policy violation so callers can treat a
 * bad record as a plain verification failure.
 */
function parseRecord(record: string): ParsedRecord | null {
  const parts = record.split("$");
  if (parts.length !== 5 || parts[0] !== "pbkdf2-sha256") return null;

  const iterations = Number(parts[1]);
  const keyBytes = Number(parts[2]);
  const paramsValid =
    Number.isInteger(iterations) &&
    Number.isInteger(keyBytes) &&
    iterations >= MIN_ITERATIONS &&
    iterations <= MAX_ITERATIONS &&
    keyBytes >= MIN_KEY_BYTES &&
    keyBytes <= MAX_KEY_BYTES;
  if (!paramsValid) return null;

  const salt = base64ToBytes(parts[3]!);
  const hash = base64ToBytes(parts[4]!);
  if (!salt || !hash) return null;
  if (salt.length < MIN_SALT_BYTES || salt.length > MAX_SALT_BYTES) return null;
  if (hash.length !== keyBytes) return null;

  return { algorithm: "pbkdf2-sha256", iterations, keyBytes, salt, hash };
}
/**
 * Hash a password with PBKDF2-SHA256 and return the compact record plus
 * its components. Iterations and key size are clamped to policy bounds;
 * the salt is random unless the request pins one via `saltBase64`.
 *
 * @throws if the password is shorter than 8 chars or the salt is invalid.
 */
export async function hashPasswordHost(
  request: HashRequest,
): Promise<HashResponse> {
  const password = assertPassword(request.password);
  const iterations = clampInt(
    request.iterations,
    DEFAULT_ITERATIONS,
    MIN_ITERATIONS,
    MAX_ITERATIONS,
  );
  const keyBytes = clampInt(
    request.keyBytes,
    DEFAULT_KEY_BYTES,
    MIN_KEY_BYTES,
    MAX_KEY_BYTES,
  );
  // Fix: the previous code clamped DEFAULT_SALT_BYTES against itself — a
  // no-op (HashRequest has no salt-size field to read). Use the constant
  // directly; caller-supplied salts are still length-checked inside
  // normalizeSalt.
  const saltBytes = DEFAULT_SALT_BYTES;
  const salt = normalizeSalt(request.saltBase64, saltBytes);

  const hash = await derivePbkdf2(
    encoder.encode(password),
    salt,
    iterations,
    keyBytes,
  );
  const saltBase64 = bytesToBase64(salt);
  const hashBase64 = bytesToBase64(hash);

  return {
    record: makeRecord(iterations, keyBytes, salt, hash),
    algorithm: "pbkdf2-sha256",
    iterations,
    keyBytes,
    saltBase64,
    hashBase64,
  };
}
/**
 * Recompute PBKDF2 for a login attempt and compare it against the stored
 * record in constant time. A malformed record reports a reason instead
 * of throwing.
 */
export async function verifyPasswordHost(
  request: VerifyRequest,
): Promise<VerifyResponse> {
  const password = assertPassword(request.password);
  const parsed = parseRecord(request.record);
  if (parsed === null) return { ok: false, reason: "record format is invalid" };

  const recomputed = await derivePbkdf2(
    encoder.encode(password),
    parsed.salt,
    parsed.iterations,
    parsed.keyBytes,
  );

  if (fixedTimeEqual(recomputed, parsed.hash)) return { ok: true };
  return { ok: false, reason: "password mismatch" };
}
export const hashPassword = task<HashRequest, HashResponse>({ f: hashPasswordHost,});
export const verifyPassword = task<VerifyRequest, VerifyResponse>({ f: verifyPasswordHost,});
function writeU16LE(out: Uint8Array, offset: number, value: number): void { out[offset] = value & 255; out[offset + 1] = (value >>> 8) & 255;}
function writeU32LE(out: Uint8Array, offset: number, value: number): void { out[offset] = value & 255; out[offset + 1] = (value >>> 8) & 255; out[offset + 2] = (value >>> 16) & 255; out[offset + 3] = (value >>> 24) & 255;}
function readU16LE(input: Uint8Array, offset: number): number { return input[offset]! | (input[offset + 1]! << 8);}
function readU32LE(input: Uint8Array, offset: number): number { return ( input[offset]! | (input[offset + 1]! << 8) | (input[offset + 2]! << 16) | (input[offset + 3]! << 24) ) >>> 0;}
// Best-effort coercion of a "bytes-like" value into a real Uint8Array.
// Worker/serialization boundaries may deliver typed arrays, ArrayBuffers,
// other views, plain arrays, or array-like objects (e.g. { data: [...] }
// or index-keyed records), so each shape is tried in order from cheapest
// to most permissive. NOTE(review): fallback paths mask values with
// & 255 — confirm callers never rely on larger values surviving.
function toBytes(value: unknown): Uint8Array {
  // Fast paths: already binary.
  if (value instanceof Uint8Array) return value;
  if (value instanceof ArrayBuffer) return new Uint8Array(value);
  if (ArrayBuffer.isView(value)) {
    // Reinterpret any other typed-array view over the same memory.
    return new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
  }
  if (Array.isArray(value)) {
    const out = new Uint8Array(value.length);
    for (let i = 0; i < value.length; i++) out[i] = Number(value[i] ?? 0) & 255;
    return out;
  }

  if (typeof value !== "object" || value === null) {
    throw new Error("packet is not a byte buffer");
  }

  const candidate = value as {
    length?: unknown;
    byteLength?: unknown;
    data?: unknown;
    [index: number]: unknown;
    [key: string]: unknown;
  };

  // Shape like Node's Buffer.toJSON(): { data: number[] }.
  if (Array.isArray(candidate.data)) {
    const out = new Uint8Array(candidate.data.length);
    for (let i = 0; i < candidate.data.length; i++) {
      out[i] = Number(candidate.data[i] ?? 0) & 255;
    }
    return out;
  }

  // Array-like: prefer `length`, fall back to `byteLength`.
  const lengthValue = Number(candidate.length);
  const byteLengthValue = Number(candidate.byteLength);
  let size = Number.isFinite(lengthValue)
    ? Math.max(0, Math.floor(lengthValue))
    : Number.isFinite(byteLengthValue)
    ? Math.max(0, Math.floor(byteLengthValue))
    : -1;

  // Last resort: infer length from the largest numeric key present.
  if (size < 0) {
    let maxIndex = -1;
    for (const key of Object.keys(candidate)) {
      if (/^\d+$/.test(key)) maxIndex = Math.max(maxIndex, Number(key));
    }
    if (maxIndex >= 0) size = maxIndex + 1;
  }

  if (size < 0) throw new Error("packet has no length");

  const out = new Uint8Array(size);
  for (let i = 0; i < size; i++) {
    out[i] = Number(candidate[i] ?? 0) & 255;
  }
  return out;
}
// Compact binary payloads are faster than structured objects for hot loops.// Header: u16 passwordLen, u16 saltLen, u32 iterations, u16 keyBytes.export function encodeHashPacket( passwordBytes: Uint8Array, saltBytes: Uint8Array, iterations: number, keyBytes: number,): Uint8Array { const headerSize = 10; const out = new Uint8Array( headerSize + passwordBytes.length + saltBytes.length, ); writeU16LE(out, 0, passwordBytes.length); writeU16LE(out, 2, saltBytes.length); writeU32LE(out, 4, iterations); writeU16LE(out, 8, keyBytes); out.set(passwordBytes, headerSize); out.set(saltBytes, headerSize + passwordBytes.length); return out;}
/**
 * Parse and validate a request packet produced by encodeHashPacket.
 * Enforces the same policy bounds as the object-based API, so workers
 * cannot be driven outside safe PBKDF2 parameters by raw packets.
 */
function decodeHashPacket(packetLike: unknown): {
  password: Uint8Array;
  salt: Uint8Array;
  iterations: number;
  keyBytes: number;
} {
  const packet = toBytes(packetLike);
  if (packet.length < 10) throw new Error("packet too small");

  const passwordLen = readU16LE(packet, 0);
  const saltLen = readU16LE(packet, 2);
  const iterations = readU32LE(packet, 4);
  const keyBytes = readU16LE(packet, 8);

  const expected = 10 + passwordLen + saltLen;
  if (expected !== packet.length) throw new Error("packet size mismatch");
  if (passwordLen < 8) throw new Error("password too short");
  if (saltLen < MIN_SALT_BYTES || saltLen > MAX_SALT_BYTES) {
    throw new Error("salt size invalid");
  }
  if (iterations < MIN_ITERATIONS || iterations > MAX_ITERATIONS) {
    throw new Error("iterations invalid");
  }
  if (keyBytes < MIN_KEY_BYTES || keyBytes > MAX_KEY_BYTES) {
    throw new Error("key size invalid");
  }

  const passwordEnd = 10 + passwordLen;
  return {
    password: packet.slice(10, passwordEnd),
    salt: packet.slice(passwordEnd, expected),
    iterations,
    keyBytes,
  };
}
// Result packet: u16 saltLen, u16 hashLen, u32 iterations, then salt + hash.function encodeHashResultPacket( salt: Uint8Array, hash: Uint8Array, iterations: number,): Uint8Array { const out = new Uint8Array(8 + salt.length + hash.length); writeU16LE(out, 0, salt.length); writeU16LE(out, 2, hash.length); writeU32LE(out, 4, iterations); out.set(salt, 8); out.set(hash, 8 + salt.length); return out;}
/**
 * Parse a result packet back into its iteration count plus
 * base64-encoded salt and hash (the shape shown in the demo output).
 *
 * @throws on truncated or size-inconsistent packets.
 */
export function decodeHashResultPacket(packet: Uint8Array): {
  iterations: number;
  saltBase64: string;
  hashBase64: string;
} {
  if (packet.length < 8) throw new Error("result packet too small");
  const saltLen = readU16LE(packet, 0);
  const hashLen = readU16LE(packet, 2);
  const iterations = readU32LE(packet, 4);
  const expected = 8 + saltLen + hashLen;
  if (expected !== packet.length) {
    throw new Error("result packet size mismatch");
  }

  const saltEnd = 8 + saltLen;
  return {
    iterations,
    saltBase64: bytesToBase64(packet.slice(8, saltEnd)),
    hashBase64: bytesToBase64(packet.slice(saltEnd, expected)),
  };
}
export async function hashPasswordPacketHost( packet: Uint8Array,): Promise<Uint8Array> { const decoded = decodeHashPacket(packet); const hash = await derivePbkdf2( decoded.password, decoded.salt, decoded.iterations, decoded.keyBytes, ); return encodeHashResultPacket(decoded.salt, hash, decoded.iterations);}
export const hashPasswordPacket = task<Uint8Array, Uint8Array>({ f: hashPasswordPacketHost,});
/**
 * Hash a batch of packets sequentially and return only a small summary
 * (total bytes + XOR of each result's last byte) so the worker reply
 * stays tiny regardless of batch size.
 */
export async function hashPasswordPacketBatchFastHost(
  packets: Uint8Array[],
): Promise<HashBatchSummary> {
  let outputBytes = 0;
  let digestXor = 0;

  for (const packet of packets) {
    const resultPacket = await hashPasswordPacketHost(packet);
    outputBytes += resultPacket.length;
    digestXor ^= resultPacket[resultPacket.length - 1] ?? 0;
  }

  return { count: packets.length, outputBytes, digestXor };
}
export const hashPasswordPacketBatchFast = task<Uint8Array[], HashBatchSummary>( { f: hashPasswordPacketBatchFastHost, },);
function fillDeterministicSalt(seed: number, bytes: number): Uint8Array { let x = (seed ^ 0x9e3779b9) >>> 0; const out = new Uint8Array(bytes); for (let i = 0; i < bytes; i++) { x ^= x << 13; x ^= x >>> 17; x ^= x << 5; out[i] = x & 255; } return out;}
export function makePasswordBytes(i: number): Uint8Array { return encoder.encode(`password-${i.toString(36)}-knitting`);}
export function makeHashPacketForIndex( i: number, iterations: number, keyBytes: number, saltBytes: number,): Uint8Array { const password = makePasswordBytes(i); const salt = fillDeterministicSalt(i + 1, saltBytes); return encodeHashPacket(password, salt, iterations, keyBytes);}
/**
 * Build `options.count` deterministic request packets, clamping every
 * parameter to policy bounds (count itself is capped at 2,000,000).
 */
export function buildDemoHashPackets(options: DemoPacketOptions): Uint8Array[] {
  const count = clampInt(options.count, 1, 1, 2_000_000);
  const iterations = clampInt(
    options.iterations,
    DEFAULT_ITERATIONS,
    MIN_ITERATIONS,
    MAX_ITERATIONS,
  );
  const keyBytes = clampInt(
    options.keyBytes,
    DEFAULT_KEY_BYTES,
    MIN_KEY_BYTES,
    MAX_KEY_BYTES,
  );
  const saltBytes = clampInt(
    options.saltBytes,
    DEFAULT_SALT_BYTES,
    MIN_SALT_BYTES,
    MAX_SALT_BYTES,
  );

  return Array.from({ length: count }, (_, i) =>
    makeHashPacketForIndex(i, iterations, keyBytes, saltBytes)
  );
}
export function hashSummaryFromOutputs( outputs: Uint8Array[],): HashBatchSummary { let outputBytes = 0; let digestXor = 0; for (let i = 0; i < outputs.length; i++) { const out = outputs[i]!; outputBytes += out.length; digestXor ^= out[out.length - 1] ?? 0; } return { count: outputs.length, outputBytes, digestXor };}
export function utf8(bytes: Uint8Array): string { return decoder.decode(bytes);}The ideal offload candidate
Section titled “The ideal offload candidate”Password hashing is one of those tasks where workers are almost always worth it. The work is CPU-bound, each call is independent, the input and output are small, and — critically — you want it to be slow (high iterations = harder to brute force). That’s a perfect storm for offloading: expensive per-call work that would otherwise block your event loop during login/signup spikes.