From e0e95f80c0d22e91a8c22a37db4667a183a7ead2 Mon Sep 17 00:00:00 2001 From: kerms Date: Thu, 12 Mar 2026 12:09:47 +0100 Subject: [PATCH] feat(nvs): normalize result with dropped/clamped counts, blob compat check normalizePartition now returns NormalizeResult { partition, dropped, clamped } so callers can report exactly what happened during import. Clamp functions report when values were modified. Add checkBlobCompatibility() for proactive size warnings on version switch/merge. Also includes reconcileBlobTypes at all import boundaries and BLOB_IDX whitelist enforcement. --- lib/nvs/index.ts | 5 + lib/nvs/nvs-binary-parser.ts | 4 +- lib/nvs/nvs-csv-parser.ts | 41 +++++++- lib/nvs/nvs-csv-serializer.ts | 12 ++- lib/nvs/nvs-partition.ts | 176 +++++++++++++++++++++++++++++++++- lib/shared/binary-reader.ts | 12 ++- lib/shared/binary-writer.ts | 9 +- 7 files changed, 240 insertions(+), 19 deletions(-) diff --git a/lib/nvs/index.ts b/lib/nvs/index.ts index 24d3134..cbdbf0f 100644 --- a/lib/nvs/index.ts +++ b/lib/nvs/index.ts @@ -7,6 +7,8 @@ export type { NvsEncoding, } from './types'; +export type { NormalizeResult } from './nvs-partition'; + export { NvsType, NvsVersion, @@ -53,4 +55,7 @@ export { validatePartition, sortEntries, generateEntryId, + normalizePartition, + reconcileBlobTypes, + checkBlobCompatibility, } from './nvs-partition'; diff --git a/lib/nvs/nvs-binary-parser.ts b/lib/nvs/nvs-binary-parser.ts index ec0af06..ffa0e3b 100644 --- a/lib/nvs/nvs-binary-parser.ts +++ b/lib/nvs/nvs-binary-parser.ts @@ -9,7 +9,7 @@ import { readU8, readU16, readU32, readI8, readI16, readI32, readU64, readI64, readNullTermString, } from '../shared/binary-reader'; -import { generateEntryId } from './nvs-partition'; +import { generateEntryId, reconcileBlobTypes } from './nvs-partition'; // ── Entry state bitmap ───────────────────────────────────────────── @@ -368,7 +368,7 @@ export function parseBinary(data: Uint8Array): NvsPartition { } return { - entries: 
Array.from(entryMap.values()), + entries: reconcileBlobTypes(Array.from(entryMap.values()), detectedVersion), namespaces, version: detectedVersion, }; diff --git a/lib/nvs/nvs-csv-parser.ts b/lib/nvs/nvs-csv-parser.ts index 8642ba4..b11eb5c 100644 --- a/lib/nvs/nvs-csv-parser.ts +++ b/lib/nvs/nvs-csv-parser.ts @@ -1,6 +1,6 @@ import type { NvsPartition, NvsEntry, NvsEncoding } from './types'; import { NvsType, NvsVersion, ENCODING_TO_TYPE } from './types'; -import { generateEntryId } from './nvs-partition'; +import { generateEntryId, reconcileBlobTypes } from './nvs-partition'; /** * Parse a line respecting quoted fields. @@ -73,9 +73,12 @@ function parseBigIntValue(str: string): bigint { } } -/** Decode hex string (e.g. "48656c6c6f") to Uint8Array */ +/** Decode hex string (e.g. "48656c6c6f") to Uint8Array. Throws on non-hex characters. */ function hexToBytes(hex: string): Uint8Array { hex = hex.replace(/\s/g, ''); + if (hex.length > 0 && !/^[0-9a-fA-F]+$/.test(hex)) { + throw new Error('Invalid hex string: contains non-hex characters'); + } const bytes = new Uint8Array(hex.length / 2); for (let i = 0; i < bytes.length; i++) { bytes[i] = parseInt(hex.substring(i * 2, i * 2 + 2), 16); @@ -144,6 +147,7 @@ export function parseCsv(text: string): NvsPartition { const entries: NvsEntry[] = []; const namespaces: string[] = []; let currentNamespace = ''; + let inferredVersion = NvsVersion.V2; for (let i = 0; i < lines.length; i++) { const line = lines[i].trim(); @@ -157,7 +161,8 @@ export function parseCsv(text: string): NvsPartition { const key = fields[0]; const type = fields[1]; - const encoding = (fields[2] || '').toLowerCase() as NvsEncoding | ''; + const rawEncoding = (fields[2] || '').toLowerCase(); + const encoding = rawEncoding as NvsEncoding | ''; const value = fields[3] || ''; if (type === 'namespace') { @@ -180,6 +185,33 @@ export function parseCsv(text: string): NvsPartition { throw new Error(`Line ${i + 1}: key "${key}" missing encoding`); } + // --- 
nvs_partition_gen.py-compatible encodings (not in NvsEncoding) --- + if (rawEncoding === 'hex2bin') { + const hexClean = value.replace(/\s/g, ''); + if (hexClean.length % 2 !== 0) throw new Error(`Line ${i + 1}: hex2bin value must have even number of hex chars`); + entries.push({ + id: generateEntryId(), + namespace: currentNamespace, + key, + type: NvsType.BLOB_DATA, + value: hexClean.length === 0 ? new Uint8Array(0) : hexToBytes(hexClean), + }); + continue; + } + if (rawEncoding === 'base64') { + const decoded = tryBase64Decode(value); + if (!decoded) throw new Error(`Line ${i + 1}: invalid base64 value for key "${key}"`); + entries.push({ + id: generateEntryId(), + namespace: currentNamespace, + key, + type: NvsType.BLOB_DATA, + value: decoded, + }); + continue; + } + // --- end nvs_partition_gen.py encodings --- + const nvsType = ENCODING_TO_TYPE[encoding as NvsEncoding]; if (nvsType === undefined) { throw new Error(`Line ${i + 1}: unknown encoding "${encoding}"`); @@ -200,6 +232,7 @@ export function parseCsv(text: string): NvsPartition { break; case 'blob': case 'binary': { + if (encoding === 'blob') inferredVersion = NvsVersion.V1; if (type === 'file') { // In browser context, file paths can't be resolved. // Store an empty Uint8Array — the UI should handle file picking. 
@@ -229,5 +262,5 @@ export function parseCsv(text: string): NvsPartition { }); } - return { entries, namespaces, version: NvsVersion.V2 }; + return { entries: reconcileBlobTypes(entries, inferredVersion), namespaces, version: inferredVersion }; } diff --git a/lib/nvs/nvs-csv-serializer.ts b/lib/nvs/nvs-csv-serializer.ts index cfadbc8..c8dee6c 100644 --- a/lib/nvs/nvs-csv-serializer.ts +++ b/lib/nvs/nvs-csv-serializer.ts @@ -1,5 +1,5 @@ import type { NvsPartition } from './types'; -import { NvsType, TYPE_TO_ENCODING, isPrimitiveType } from './types'; +import { NvsType, NvsVersion, TYPE_TO_ENCODING, isPrimitiveType } from './types'; /** Convert Uint8Array to hex string */ function bytesToHex(data: Uint8Array): string { @@ -52,8 +52,14 @@ export function serializeCsv(partition: NvsPartition): string { } else if (entry.type === NvsType.SZ) { valueStr = escapeCsvField(entry.value as string); } else { - // BLOB / BLOB_DATA — hex encode - valueStr = bytesToHex(entry.value as Uint8Array); + // BLOB / BLOB_DATA — version-aware encoding + const hex = bytesToHex(entry.value as Uint8Array); + // V1 uses 'blob' (monolithic single entry in binary). + // V2 uses 'hex2bin' (chunked BLOB_DATA + BLOB_IDX); nvs_partition_gen.py + // rejects 'blob' for V2 and would produce a V1 binary from it. + const csvEncoding = partition.version === NvsVersion.V1 ? 
'blob' : 'hex2bin'; + lines.push(`${escapeCsvField(entry.key)},data,${csvEncoding},${hex}`); + continue; } lines.push(`${escapeCsvField(entry.key)},data,${encoding},${valueStr}`); diff --git a/lib/nvs/nvs-partition.ts b/lib/nvs/nvs-partition.ts index 5da696f..f4ad080 100644 --- a/lib/nvs/nvs-partition.ts +++ b/lib/nvs/nvs-partition.ts @@ -2,6 +2,15 @@ import type { NvsEntry, NvsPartition, NvsFlashStats } from './types'; import { NvsType, NvsVersion, isPrimitiveType } from './types'; import { ENTRIES_PER_PAGE, ENTRY_SIZE, PAGE_SIZE, MAX_KEY_LENGTH, MAX_NAMESPACES, MAX_STRING_LENGTH, MAX_BLOB_SIZE_V1, MAX_BLOB_SIZE_V2 } from './constants'; +/** Result of normalizing a raw deserialized partition. */ +export interface NormalizeResult { + partition: NvsPartition; + /** Entries that were completely unsalvageable and removed. */ + dropped: number; + /** Entries whose numeric values were clamped to fit the type range. */ + clamped: number; +} + /** Generate a random unique ID for client-side entry tracking */ export function generateEntryId(): string { return Math.random().toString(36).slice(2) + Date.now().toString(36); @@ -102,7 +111,7 @@ export function mergePartitions( } } - return { ...target, entries, namespaces }; + return { ...target, entries: reconcileBlobTypes(entries, target.version), namespaces }; } /** Calculate the entry span for a single NvsEntry */ @@ -192,6 +201,9 @@ export function validatePartition(partition: NvsPartition): string[] { if (ns.length > MAX_KEY_LENGTH) { errors.push(`Namespace "${ns}" exceeds ${MAX_KEY_LENGTH} characters`); } + if ([...ns].some(c => c.charCodeAt(0) > 0xFF)) { + errors.push(`Namespace "${ns}" contains non-Latin-1 characters (binary format only supports 8-bit characters)`); + } } for (const entry of partition.entries) { @@ -201,6 +213,9 @@ export function validatePartition(partition: NvsPartition): string[] { if (entry.key.length > MAX_KEY_LENGTH) { errors.push(`Key "${entry.key}" exceeds ${MAX_KEY_LENGTH} characters`); } + 
if ([...entry.key].some(c => c.charCodeAt(0) > 0xFF)) { + errors.push(`Key "${entry.key}" contains non-Latin-1 characters (binary format only supports 8-bit characters)`); + } if (!partition.namespaces.includes(entry.namespace)) { errors.push(`Key "${entry.key}" references unregistered namespace "${entry.namespace}"`); } @@ -251,6 +266,18 @@ export function validatePartition(partition: NvsPartition): string[] { errors.push(`"${entry.key}" BLOB ${entry.value.length} bytes exceeds V2 limit ${MAX_BLOB_SIZE_V2}`); } } + + // BLOB_IDX is an internal serializer type — it must never appear as a user entry. + if (entry.type === NvsType.BLOB_IDX) { + errors.push(`"${entry.key}" has internal-only type BLOB_IDX (synthesized by serializer, not valid user input)`); + } + // Version/type consistency — prevents poisoned binaries. + if (entry.type === NvsType.BLOB_DATA && partition.version === NvsVersion.V1) { + errors.push(`"${entry.key}" has V2-only type BLOB_DATA in a V1 (IDF < v4.0) partition`); + } + if (entry.type === NvsType.BLOB && partition.version === NvsVersion.V2) { + errors.push(`"${entry.key}" has V1-only type BLOB in a V2 (IDF ≥ v4.0) partition`); + } } // Check for duplicate (namespace, key) pairs @@ -274,3 +301,150 @@ export function sortEntries(partition: NvsPartition): NvsPartition { }); return { ...partition, entries }; } + +/** + * Coerce BLOB/BLOB_DATA types to match partition version. + * V1 partitions use monolithic BLOB (0x41); V2 partitions use chunked BLOB_DATA (0x42). + * Must be called at every import boundary (JSON, localStorage, binary parser). 
+ */ +export function reconcileBlobTypes(entries: NvsEntry[], version: NvsVersion): NvsEntry[] { + return entries.map(e => { + if (version === NvsVersion.V1 && e.type === NvsType.BLOB_DATA) return { ...e, type: NvsType.BLOB }; + if (version === NvsVersion.V2 && e.type === NvsType.BLOB) return { ...e, type: NvsType.BLOB_DATA }; + return e; + }); +} + +/** + * Normalize and validate a raw deserialized object into a well-formed NvsPartition. + * Single gate for all deserialization paths (localStorage restore + JSON import/merge). + * Never throws. Regenerates missing/duplicate ids. Strips NUL bytes from keys and namespaces. + * Returns metadata about dropped and clamped entries for UI warnings. + */ +export function normalizePartition(raw: unknown): NormalizeResult { + if (!raw || typeof raw !== 'object' || Array.isArray(raw)) { + return { partition: createEmptyPartition(), dropped: 0, clamped: 0 }; + } + const obj = raw as Record<string, unknown>; + + const VALID_VERSIONS = new Set<number>([NvsVersion.V1, NvsVersion.V2]); + const version: NvsVersion = + typeof obj.version === 'number' && VALID_VERSIONS.has(obj.version) + ? (obj.version as NvsVersion) + : NvsVersion.V2; + + // BLOB_IDX (0x48) is synthesized internally by the serializer; it is never a valid + // user entry. All other NvsType values are acceptable user input. + const VALID_TYPES = new Set<number>([ + NvsType.U8, NvsType.I8, NvsType.U16, NvsType.I16, + NvsType.U32, NvsType.I32, NvsType.U64, NvsType.I64, + NvsType.SZ, NvsType.BLOB, NvsType.BLOB_DATA, + ]); + const rawEntries = Array.isArray(obj.entries) ? obj.entries : []; + const seenIds = new Set<string>(); + const entries: NvsEntry[] = []; + let dropped = 0; + let clamped = 0; + + for (const re of rawEntries) { + if (!re || typeof re !== 'object' || Array.isArray(re)) { dropped++; continue; } + const r = re as Record<string, unknown>; + if (typeof r.type !== 'number' || !VALID_TYPES.has(r.type)) { dropped++; continue; } + const type = r.type as NvsType; + const namespace = typeof r.namespace === 'string' ? 
r.namespace.replace(/\0/g, '') : ''; + if (typeof r.key !== 'string') { dropped++; continue; } + const key = r.key.replace(/\0/g, ''); + if (key.length === 0) { dropped++; continue; } + const result = _normalizeEntryValue(type, r.value); + if (result === null) { dropped++; continue; } + if (result.clamped) clamped++; + let id = typeof r.id === 'string' && r.id.length > 0 ? r.id : ''; + if (!id || seenIds.has(id)) id = generateEntryId(); + seenIds.add(id); + entries.push({ id, namespace, key, type, value: result.value }); + } + + const reconciledEntries = reconcileBlobTypes(entries, version); + + // Rebuild namespaces: preserve stored order, deduplicate, add missing, drop unused + const rawNs = Array.isArray(obj.namespaces) ? obj.namespaces : []; + const orderedNs = (rawNs.filter((n): n is string => typeof n === 'string')) + .reduce<string[]>((acc, n) => { if (!acc.includes(n)) acc.push(n); return acc; }, []); + for (const e of reconciledEntries) { + if (e.namespace && !orderedNs.includes(e.namespace)) orderedNs.push(e.namespace); + } + const usedNs = new Set(reconciledEntries.map(e => e.namespace)); + const namespaces = orderedNs.filter(n => usedNs.has(n)); + + return { partition: { entries: reconciledEntries, namespaces, version }, dropped, clamped }; +} + +/** Returns normalized value for type, or null if unsalvageable. `clamped` is true if the value was modified. */ +function _normalizeEntryValue(type: NvsType, raw: unknown): { value: NvsEntry['value']; clamped: boolean } | null { + // U64/I64 MUST come before isPrimitiveType() check — isPrimitiveType includes them + // but they require BigInt to avoid Number() precision loss above 2^53. 
+ if (type === NvsType.U64 || type === NvsType.I64) { + if (typeof raw === 'bigint') return _clampBigInt(type, raw); + if (typeof raw === 'number') return _clampBigInt(type, BigInt(Math.trunc(raw))); + if (typeof raw === 'string') { + try { return _clampBigInt(type, BigInt(raw)); } catch { return null; } + } + return null; + } + if (isPrimitiveType(type)) { + let n: number; + if (typeof raw === 'number') n = raw; + else if (typeof raw === 'string') { n = Number(raw); if (Number.isNaN(n)) return null; } + else return null; + return _clampPrimitive(type, Math.trunc(n)); + } + if (type === NvsType.SZ) return typeof raw === 'string' ? { value: raw, clamped: false } : null; + // BLOB / BLOB_DATA — already revived by partitionFromJson reviver (BLOB_IDX is rejected by VALID_TYPES before reaching here) + if (raw instanceof Uint8Array) return { value: raw, clamped: false }; + return null; // malformed/missing blob payload — drop the entry +} + +function _clampPrimitive(type: NvsType, n: number): { value: number; clamped: boolean } { + let v: number; + switch (type) { + case NvsType.U8: v = Math.max(0, Math.min(0xFF, n)); break; + case NvsType.I8: v = Math.max(-128, Math.min(127, n)); break; + case NvsType.U16: v = Math.max(0, Math.min(0xFFFF, n)); break; + case NvsType.I16: v = Math.max(-32768, Math.min(32767, n)); break; + case NvsType.U32: v = Math.max(0, Math.min(0xFFFFFFFF, n)); break; + case NvsType.I32: v = Math.max(-2147483648, Math.min(2147483647, n)); break; + default: v = n; + } + return { value: v, clamped: v !== n }; +} + +function _clampBigInt(type: NvsType, v: bigint): { value: bigint; clamped: boolean } { + let r: bigint; + if (type === NvsType.U64) { + r = v < 0n ? 0n : v > 0xFFFFFFFFFFFFFFFFn ? 0xFFFFFFFFFFFFFFFFn : v; + } else { + // I64 + r = v < -9223372036854775808n ? -9223372036854775808n + : v > 9223372036854775807n ? 9223372036854775807n : v; + } + return { value: r, clamped: r !== v }; +} + +/** + * Check blob entries against the target version's size limit. 
+ * Returns human-readable warnings for each oversized blob. + */ +export function checkBlobCompatibility( + entries: NvsEntry[], + targetVersion: NvsVersion, +): string[] { + const limit = targetVersion === NvsVersion.V1 ? MAX_BLOB_SIZE_V1 : MAX_BLOB_SIZE_V2; + const warnings: string[] = []; + for (const e of entries) { + if ((e.type === NvsType.BLOB || e.type === NvsType.BLOB_DATA) && + e.value instanceof Uint8Array && e.value.length > limit) { + warnings.push(`"${e.key}" (${e.value.length}B) exceeds limit ${limit}B`); + } + } + return warnings; +} diff --git a/lib/shared/binary-reader.ts b/lib/shared/binary-reader.ts index 6c3c90e..f3eafec 100644 --- a/lib/shared/binary-reader.ts +++ b/lib/shared/binary-reader.ts @@ -37,10 +37,12 @@ export function readI64(buf: Uint8Array, off: number): bigint { return v > 0x7FFFFFFFFFFFFFFFn ? v - 0x10000000000000000n : v; } -/** Read null-terminated ASCII string of max `maxLen` bytes */ +/** Read null-terminated byte string of max `maxLen` bytes, preserving all byte values 0x01–0xFF */ export function readNullTermString(buf: Uint8Array, off: number, maxLen: number): string { - let end = off; - while (end < off + maxLen && buf[end] !== 0) end++; - const decoder = new TextDecoder('ascii'); - return decoder.decode(buf.subarray(off, end)); + let result = ''; + for (let i = off; i < off + maxLen; i++) { + if (buf[i] === 0) break; + result += String.fromCharCode(buf[i]); + } + return result; } diff --git a/lib/shared/binary-writer.ts b/lib/shared/binary-writer.ts index a586dd9..29fb951 100644 --- a/lib/shared/binary-writer.ts +++ b/lib/shared/binary-writer.ts @@ -41,10 +41,11 @@ export function writeI64(buf: Uint8Array, off: number, val: bigint) { writeU64(buf, off, u); } -/** Write null-terminated ASCII string padded to `fieldSize` bytes */ +/** Write null-terminated byte string padded to `fieldSize` bytes, preserving all byte values 0x01–0xFF */ export function writeNullTermString(buf: Uint8Array, off: number, str: string, fieldSize: 
number) { buf.fill(0, off, off + fieldSize); - const encoder = new TextEncoder(); - const strBytes = encoder.encode(str); - buf.set(strBytes.subarray(0, fieldSize - 1), off); // leave room for null + const len = Math.min(str.length, fieldSize - 1); + for (let i = 0; i < len; i++) { + buf[off + i] = str.charCodeAt(i) & 0xFF; + } }