feat(nvs): add NVS editor and binary/CSV toolkit

This commit is contained in:
kerms 2026-02-22 13:17:49 +01:00
parent 8320bf7ab2
commit b23a7e5c8a
9 changed files with 2178 additions and 0 deletions

View File

@ -0,0 +1,649 @@
<script setup lang="ts">
import { ref, computed, reactive } from 'vue';
import {
type NvsPartition, type NvsEntry, type NvsEncoding, type NvsFlashStats,
NvsType, NvsVersion,
ENCODING_OPTIONS, ENCODING_TO_TYPE, TYPE_TO_ENCODING,
isPrimitiveType,
createEmptyPartition, addEntry, removeEntry, updateEntry,
duplicateEntry, mergePartitions, calculateFlashStats,
validatePartition, sortEntries, generateEntryId,
parseBinary, serializeBinary, parseCsv, serializeCsv,
MAX_KEY_LENGTH, PAGE_SIZE,
} from '../../lib/nvs';
// NOTE(review): isDark is accepted but not referenced anywhere in this
// component's visible script/template — confirm whether it is still needed.
const props = defineProps<{
  isDark?: boolean;
}>();
// Core state
const partition = ref<NvsPartition>(createEmptyPartition());
const targetSize = ref(0x4000); // 16KB default
// UI state
const namespaceFilter = ref(''); // '' = show all namespaces
const keySearch = ref('');       // substring match against entry keys
const mergeMode = ref<'overwrite' | 'skip'>('overwrite');
const statusMessage = ref('');   // transient alert text; '' hides the alert
const statusType = ref<'success' | 'error' | 'info'>('info');
// Add entry dialog
const showAddDialog = ref(false);
const newEntry = reactive({
  namespace: '',
  key: '',
  encoding: 'u8' as NvsEncoding,
  value: '',
});
// Add namespace dialog
const showNsDialog = ref(false);
const newNamespace = ref('');
// Computed
// Flash usage statistics for the current partition at the selected size.
const flashStats = computed<NvsFlashStats>(() =>
  calculateFlashStats(partition.value, targetSize.value),
);
// Validation problems shown in the warning banner (empty = valid).
const errors = computed(() => validatePartition(partition.value));
// Table rows after applying the namespace filter and key search.
const filteredEntries = computed(() => {
  let entries = partition.value.entries;
  if (namespaceFilter.value) {
    entries = entries.filter(e => e.namespace === namespaceFilter.value);
  }
  if (keySearch.value) {
    entries = entries.filter(e => e.key.includes(keySearch.value));
  }
  return entries;
});
// Usage bar color thresholds (presumably the Element Plus
// danger/warning/success palette colors).
const progressColor = computed(() => {
  const pct = flashStats.value.usagePercent;
  if (pct >= 85) return '#F56C6C';
  if (pct >= 60) return '#E6A23C';
  return '#67C23A';
});
// Selectable partition sizes; all are multiples of the 4 KB page size.
const sizeOptions = [
  { label: '12 KB (3页)', value: 0x3000 },
  { label: '16 KB (4页)', value: 0x4000 },
  { label: '20 KB (5页)', value: 0x5000 },
  { label: '24 KB (6页)', value: 0x6000 },
  { label: '32 KB (8页)', value: 0x8000 },
  { label: '64 KB (16页)', value: 0x10000 },
  { label: '128 KB (32页)', value: 0x20000 },
  { label: '256 KB (64页)', value: 0x40000 },
];
// Helpers
// Handle of the pending auto-dismiss timer, so a newer message is not
// cleared early by a stale timeout left over from a previous call.
let statusTimer: ReturnType<typeof setTimeout> | undefined;

/**
 * Show a transient status message in the alert bar for ~4 seconds.
 * @param msg  Text to display.
 * @param type Alert severity; defaults to 'info'.
 */
function showStatus(msg: string, type: 'success' | 'error' | 'info' = 'info') {
  statusMessage.value = msg;
  statusType.value = type;
  // Restart the countdown. Without this, a message shown within the
  // 4-second window of an earlier one would be dismissed prematurely
  // by the earlier call's timeout.
  if (statusTimer !== undefined) clearTimeout(statusTimer);
  statusTimer = setTimeout(() => { statusMessage.value = ''; }, 4000);
}
/** Map an NvsType back to its encoding label; falls back to 'u8' for unknown types. */
function getEncodingForType(type: NvsType): NvsEncoding {
  return TYPE_TO_ENCODING[type] ?? 'u8';
}
/**
 * Render an entry value for table display: a space-separated hex dump for
 * byte arrays (truncated after 32 bytes with a byte-count suffix),
 * plain string conversion for everything else.
 */
function formatValue(entry: NvsEntry): string {
  const value = entry.value;
  if (!(value instanceof Uint8Array)) {
    return String(value);
  }
  const toHex = (bytes: Uint8Array) =>
    Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join(' ');
  return value.length <= 32
    ? toHex(value)
    : `${toHex(value.subarray(0, 32))} ... (${value.length} 字节)`;
}
/**
 * Trigger a browser download of `blob` under `filename` via a transient
 * anchor element.
 */
function downloadBlob(blob: Blob, filename: string) {
  const objectUrl = URL.createObjectURL(blob);
  const anchor = document.createElement('a');
  anchor.href = objectUrl;
  anchor.download = filename;
  document.body.appendChild(anchor);
  anchor.click();
  anchor.remove();
  // Revoke lazily so the browser has time to begin the download.
  setTimeout(() => URL.revokeObjectURL(objectUrl), 10000);
}
/** Parse a value string based on encoding */
function parseValueInput(encoding: NvsEncoding, raw: string): number | bigint | string | Uint8Array {
switch (encoding) {
case 'u8': case 'u16': case 'u32':
case 'i8': case 'i16': case 'i32': {
const str = raw.trim();
if (str.startsWith('0x') || str.startsWith('0X')) {
if (!/^-?0[xX][0-9a-fA-F]+$/.test(str)) throw new Error(`无效的整数值: "${str}"`);
return parseInt(str, 16);
}
if (!/^-?\d+$/.test(str)) throw new Error(`无效的整数值: "${str}"`);
return parseInt(str, 10);
}
case 'u64': case 'i64': {
const str64 = raw.trim();
if (str64.startsWith('-0x') || str64.startsWith('-0X')) {
if (!/^-0[xX][0-9a-fA-F]+$/.test(str64)) throw new Error(`无效的整数值: "${str64}"`);
return -BigInt(str64.slice(1));
}
try { return BigInt(str64); } catch { throw new Error(`无效的整数值: "${str64}"`); }
}
case 'string':
return raw;
case 'blob':
case 'binary': {
const hex = raw.replace(/\s/g, '');
if (hex.length === 0) return new Uint8Array(0);
if (hex.length % 2 !== 0) throw new Error(`十六进制字符串长度必须为偶数`);
if (!/^[0-9a-fA-F]+$/.test(hex)) throw new Error(`十六进制字符串包含无效字符`);
const bytes = new Uint8Array(hex.length / 2);
for (let i = 0; i < bytes.length; i++) {
bytes[i] = parseInt(hex.substring(i * 2, i * 2 + 2), 16);
}
return bytes;
}
default:
return raw;
}
}
// Actions: CRUD
/**
 * Create a new entry from the add-entry dialog fields.
 * Shows an error status and keeps the dialog open if the value string
 * does not parse for the selected encoding.
 */
function handleAddEntry() {
  const { namespace, key, encoding } = newEntry;
  if (!namespace || !key) return;
  const type = ENCODING_TO_TYPE[encoding];
  let parsed: ReturnType<typeof parseValueInput>;
  try {
    parsed = parseValueInput(encoding, newEntry.value);
  } catch (e: any) {
    showStatus(e.message ?? '值格式错误', 'error');
    return;
  }
  partition.value = addEntry(partition.value, {
    namespace,
    key,
    type,
    value: parsed,
  });
  // Close the dialog and clear the per-entry fields; keep namespace and
  // encoding so consecutive adds are quick.
  showAddDialog.value = false;
  newEntry.key = '';
  newEntry.value = '';
  showStatus('已添加记录', 'success');
}
/**
 * Register a new namespace from the namespace dialog.
 * Rejects blanks and duplicates, then pre-selects the namespace in the
 * add-entry dialog for convenience.
 */
function handleAddNamespace() {
  const name = newNamespace.value.trim();
  if (name.length === 0) return;
  const existing = partition.value.namespaces;
  if (existing.includes(name)) {
    showStatus('命名空间已存在', 'error');
    return;
  }
  partition.value = {
    ...partition.value,
    namespaces: existing.concat(name),
  };
  showNsDialog.value = false;
  newNamespace.value = '';
  // Auto-select new namespace in add dialog
  newEntry.namespace = name;
  showStatus(`已添加命名空间 "${name}"`, 'success');
}
/** Remove the entry with the given id from the partition. */
function handleDeleteEntry(entryId: string) {
  partition.value = removeEntry(partition.value, entryId);
}
/** Insert a copy of the given entry (id presumably regenerated by duplicateEntry — see lib). */
function handleDuplicateEntry(entryId: string) {
  partition.value = duplicateEntry(partition.value, entryId);
  showStatus('已复制记录', 'success');
}
/** Re-order entries (ordering defined by sortEntries in lib/nvs). */
function handleSort() {
  partition.value = sortEntries(partition.value);
}
/** Reset to an empty partition, preserving the current format version. */
function handleClear() {
  partition.value = createEmptyPartition(partition.value.version);
  showStatus('已清空所有记录', 'info');
}
// Actions: Inline edit
/** Inline table edit: rename an entry's key. */
function handleUpdateKey(entryId: string, newKey: string) {
  partition.value = updateEntry(partition.value, entryId, { key: newKey });
}
/** Inline table edit: move an entry to a different namespace. */
function handleUpdateNamespace(entryId: string, ns: string) {
  partition.value = updateEntry(partition.value, entryId, { namespace: ns });
}
/**
 * Inline table edit: change an entry's type, resetting its value to a
 * sensible default for the new type so the stored value's JS type always
 * matches what the serializer expects.
 */
function handleUpdateEncoding(entryId: string, encoding: NvsEncoding) {
  const type = ENCODING_TO_TYPE[encoding];
  let value: NvsEntry['value'];
  if (type === NvsType.U64 || type === NvsType.I64) {
    // 64-bit values travel as bigint elsewhere (the binary parser reads
    // them with readU64/readI64); a number 0 here would hand the 64-bit
    // write path the wrong JS type.
    value = 0n;
  } else if (isPrimitiveType(type)) {
    value = 0;
  } else if (type === NvsType.SZ) {
    value = '';
  } else {
    value = new Uint8Array(0);
  }
  partition.value = updateEntry(partition.value, entryId, { type, value });
}
/**
 * Inline table edit: re-parse the raw value string for the entry's
 * encoding and store the result. On a parse failure, only a status
 * message is shown and the entry is left unchanged.
 */
function handleUpdateValue(entryId: string, encoding: NvsEncoding, raw: string) {
  let parsed: ReturnType<typeof parseValueInput>;
  try {
    parsed = parseValueInput(encoding, raw);
  } catch (e: any) {
    showStatus(e.message ?? '值格式错误', 'error');
    return;
  }
  partition.value = updateEntry(partition.value, entryId, { value: parsed });
}
// Actions: File I/O
/**
 * Replace the current partition with the contents of an NVS binary file.
 * Also snaps the target size selector to the loaded file's size.
 * Always resolves to false so el-upload skips its HTTP upload.
 */
async function handleOpenBinary(file: File): Promise<false> {
  try {
    const bytes = new Uint8Array(await file.arrayBuffer());
    partition.value = parseBinary(bytes);
    // Match the target size to the file we just loaded.
    targetSize.value = bytes.byteLength;
    showStatus(`已加载 ${file.name} (${bytes.byteLength} 字节)`, 'success');
  } catch (e: any) {
    showStatus(`加载失败: ${e.message}`, 'error');
  }
  return false; // prevent el-upload auto-upload
}
/**
 * Serialize the current partition to NVS binary at the selected target
 * size and download it as nvs.bin. Export is blocked while validation
 * errors exist (the first one is surfaced in the status bar).
 */
function handleExportBinary() {
  try {
    const validationErrors = validatePartition(partition.value);
    if (validationErrors.length > 0) {
      showStatus(`验证错误: ${validationErrors[0]}`, 'error');
      return;
    }
    const binary = serializeBinary(partition.value, targetSize.value);
    downloadBlob(new Blob([binary]), 'nvs.bin');
    showStatus('已导出 nvs.bin', 'success');
  } catch (e: any) {
    showStatus(`导出失败: ${e.message}`, 'error');
  }
}
/**
 * Merge entries from another NVS binary into the current partition.
 * Key conflicts are handled per the user-selected mergeMode
 * ('overwrite' or 'skip').
 */
async function handleMergeBinary(file: File): Promise<false> {
  try {
    const buffer = await file.arrayBuffer();
    const incoming = parseBinary(new Uint8Array(buffer));
    partition.value = mergePartitions(partition.value, incoming, mergeMode.value);
    showStatus(`已合并 ${file.name} (${incoming.entries.length} 条记录)`, 'success');
  } catch (e: any) {
    showStatus(`合并失败: ${e.message}`, 'error');
  }
  // Returning false stops el-upload from performing an HTTP upload.
  return false;
}
/** Replace the current partition with entries parsed from a CSV file. */
async function handleOpenCsv(file: File): Promise<false> {
  try {
    const text = await file.text();
    partition.value = parseCsv(text);
    showStatus(`已加载 ${file.name}`, 'success');
  } catch (e: any) {
    showStatus(`加载失败: ${e.message}`, 'error');
  }
  return false;
}
/** Download the current partition as nvs.csv (UTF-8). */
function handleExportCsv() {
  try {
    const text = serializeCsv(partition.value);
    downloadBlob(new Blob([text], { type: 'text/csv;charset=utf-8' }), 'nvs.csv');
    showStatus('已导出 nvs.csv', 'success');
  } catch (e: any) {
    showStatus(`导出失败: ${e.message}`, 'error');
  }
}
/** Merge entries from a CSV file, honoring the selected mergeMode. */
async function handleMergeCsv(file: File): Promise<false> {
  try {
    const text = await file.text();
    const incoming = parseCsv(text);
    partition.value = mergePartitions(partition.value, incoming, mergeMode.value);
    showStatus(`已合并 ${file.name} (${incoming.entries.length} 条记录)`, 'success');
  } catch (e: any) {
    showStatus(`合并失败: ${e.message}`, 'error');
  }
  return false;
}
/** Replace a blob entry's value with the raw bytes of an uploaded file. */
async function handleBlobFileUpload(entryId: string, file: File): Promise<false> {
  try {
    const buffer = await file.arrayBuffer();
    const data = new Uint8Array(buffer);
    partition.value = updateEntry(partition.value, entryId, { value: data });
    showStatus(`已上传 ${file.name} (${data.length} 字节)`, 'success');
  } catch (e: any) {
    showStatus(`上传失败: ${e.message}`, 'error');
  }
  return false;
}
</script>
<template>
<div>
<!-- Status message -->
<transition name="el-fade-in">
<el-alert
v-if="statusMessage"
:title="statusMessage"
:type="statusType"
show-icon
closable
class="mb-3"
@close="statusMessage = ''"
/>
</transition>
<!-- Validation errors -->
<el-alert
v-if="errors.length > 0"
type="warning"
show-icon
class="mb-3"
:closable="false"
>
<template #title>
验证问题 ({{ errors.length }})
</template>
<div v-for="(err, i) in errors" :key="i" class="text-xs">{{ err }}</div>
</el-alert>
<!-- Toolbar -->
<div class="flex flex-wrap items-center gap-2 mb-3">
<el-button type="primary" @click="showAddDialog = true">
添加记录
</el-button>
<el-button @click="showNsDialog = true">
添加命名空间
</el-button>
<el-button @click="handleSort">排序</el-button>
<el-button type="danger" plain @click="handleClear">清空</el-button>
<el-divider direction="vertical" />
<span class="text-sm">分区大小:</span>
<el-select v-model="targetSize" style="width: 160px;">
<el-option
v-for="opt in sizeOptions"
:key="opt.value"
:label="opt.label"
:value="opt.value"
/>
</el-select>
<el-divider direction="vertical" />
<div class="flex items-center gap-2 min-w-[200px]">
<el-progress
:percentage="flashStats.usagePercent"
:color="progressColor"
:stroke-width="14"
:show-text="false"
style="flex: 1;"
/>
<el-text size="small">
{{ flashStats.usedEntries }} / {{ flashStats.maxEntries }} 条目
</el-text>
</div>
</div>
<!-- Filter row -->
<div class="flex flex-wrap items-center gap-2 mb-3">
<el-select
v-model="namespaceFilter"
placeholder="全部命名空间"
clearable
style="width: 180px;"
>
<el-option
v-for="ns in partition.namespaces"
:key="ns"
:label="ns"
:value="ns"
/>
</el-select>
<el-input
v-model="keySearch"
placeholder="搜索键名..."
clearable
style="width: 200px;"
/>
</div>
<!-- Data table -->
<el-table
:data="filteredEntries"
border
stripe
size="small"
row-key="id"
empty-text="暂无记录,请添加或导入数据"
max-height="500"
>
<el-table-column label="命名空间" width="150">
<template #default="{ row }">
<el-select
:model-value="row.namespace"
size="small"
@change="(val: string) => handleUpdateNamespace(row.id, val)"
>
<el-option
v-for="ns in partition.namespaces"
:key="ns"
:label="ns"
:value="ns"
/>
</el-select>
</template>
</el-table-column>
<el-table-column label="键名" width="180">
<template #default="{ row }">
<el-input
:model-value="row.key"
size="small"
:maxlength="MAX_KEY_LENGTH"
@change="(val: string) => handleUpdateKey(row.id, val)"
/>
</template>
</el-table-column>
<el-table-column label="类型" width="120">
<template #default="{ row }">
<el-select
:model-value="getEncodingForType(row.type)"
size="small"
@change="(val: NvsEncoding) => handleUpdateEncoding(row.id, val)"
>
<el-option
v-for="enc in ENCODING_OPTIONS"
:key="enc"
:label="enc"
:value="enc"
/>
</el-select>
</template>
</el-table-column>
<el-table-column label="值" min-width="250">
<template #default="{ row }">
<!-- Primitive types: input -->
<el-input
v-if="isPrimitiveType(row.type)"
:model-value="String(row.value)"
size="small"
@change="(val: string) => handleUpdateValue(row.id, getEncodingForType(row.type), val)"
/>
<!-- String type -->
<el-input
v-else-if="row.type === NvsType.SZ"
:model-value="row.value as string"
size="small"
type="textarea"
:autosize="{ minRows: 1, maxRows: 3 }"
@change="(val: string) => handleUpdateValue(row.id, 'string', val)"
/>
<!-- Blob types -->
<div v-else class="flex items-center gap-1">
<el-text size="small" class="font-mono" truncated>
{{ formatValue(row) }}
</el-text>
<el-upload
:before-upload="(file: File) => handleBlobFileUpload(row.id, file)"
:show-file-list="false"
accept="*/*"
>
<el-button size="small" type="info" plain>上传文件</el-button>
</el-upload>
</div>
</template>
</el-table-column>
<el-table-column label="操作" width="120" fixed="right">
<template #default="{ row }">
<el-button
size="small"
text
@click="handleDuplicateEntry(row.id)"
title="复制"
>
复制
</el-button>
<el-popconfirm
title="确定删除?"
@confirm="handleDeleteEntry(row.id)"
>
<template #reference>
<el-button size="small" text type="danger" title="删除">
删除
</el-button>
</template>
</el-popconfirm>
</template>
</el-table-column>
</el-table>
<!-- Import/Export section -->
<el-divider />
<div class="flex flex-wrap gap-4">
<!-- Binary -->
<div>
<el-text tag="b" class="block mb-2">二进制文件 (.bin)</el-text>
<div class="flex flex-wrap gap-2">
<el-upload :before-upload="handleOpenBinary" :show-file-list="false" accept=".bin">
<el-button>打开</el-button>
</el-upload>
<el-button type="primary" @click="handleExportBinary">导出</el-button>
<el-upload :before-upload="handleMergeBinary" :show-file-list="false" accept=".bin">
<el-button>合并</el-button>
</el-upload>
</div>
</div>
<!-- CSV -->
<div>
<el-text tag="b" class="block mb-2">CSV文件 (.csv)</el-text>
<div class="flex flex-wrap gap-2">
<el-upload :before-upload="handleOpenCsv" :show-file-list="false" accept=".csv">
<el-button>打开</el-button>
</el-upload>
<el-button type="primary" @click="handleExportCsv">导出</el-button>
<el-upload :before-upload="handleMergeCsv" :show-file-list="false" accept=".csv">
<el-button>合并</el-button>
</el-upload>
</div>
</div>
<!-- Merge options -->
<div>
<el-text tag="b" class="block mb-2">合并选项</el-text>
<el-radio-group v-model="mergeMode">
<el-radio value="overwrite">覆盖同名键</el-radio>
<el-radio value="skip">跳过同名键</el-radio>
</el-radio-group>
</div>
</div>
<!-- Add entry dialog -->
<el-dialog v-model="showAddDialog" title="添加记录" width="450px">
<el-form label-width="80px">
<el-form-item label="命名空间">
<el-select v-model="newEntry.namespace" placeholder="选择命名空间">
<el-option
v-for="ns in partition.namespaces"
:key="ns"
:label="ns"
:value="ns"
/>
</el-select>
</el-form-item>
<el-form-item label="键名">
<el-input v-model="newEntry.key" :maxlength="MAX_KEY_LENGTH" placeholder="key name" />
</el-form-item>
<el-form-item label="类型">
<el-select v-model="newEntry.encoding">
<el-option v-for="enc in ENCODING_OPTIONS" :key="enc" :label="enc" :value="enc" />
</el-select>
</el-form-item>
<el-form-item label="值">
<el-input
v-model="newEntry.value"
:type="newEntry.encoding === 'string' ? 'textarea' : 'text'"
placeholder="value"
/>
</el-form-item>
</el-form>
<template #footer>
<el-button @click="showAddDialog = false">取消</el-button>
<el-button type="primary" @click="handleAddEntry" :disabled="!newEntry.namespace || !newEntry.key">
添加
</el-button>
</template>
</el-dialog>
<!-- Add namespace dialog -->
<el-dialog v-model="showNsDialog" title="添加命名空间" width="400px">
<el-form label-width="80px">
<el-form-item label="名称">
<el-input
v-model="newNamespace"
:maxlength="MAX_KEY_LENGTH"
placeholder="namespace name"
@keyup.enter="handleAddNamespace"
/>
</el-form-item>
</el-form>
<template #footer>
<el-button @click="showNsDialog = false">取消</el-button>
<el-button type="primary" @click="handleAddNamespace" :disabled="!newNamespace.trim()">
添加
</el-button>
</template>
</el-dialog>
</div>
</template>
<style scoped>
.font-mono {
font-family: 'Courier New', Courier, monospace;
}
</style>

45
lib/nvs/constants.ts Normal file
View File

@ -0,0 +1,45 @@
/** Page size = one flash sector */
export const PAGE_SIZE = 4096;
/** Page header occupies bytes 0..31 */
export const PAGE_HEADER_SIZE = 32;
/** Entry state bitmap at bytes 32..63 */
export const BITMAP_OFFSET = PAGE_HEADER_SIZE;
export const BITMAP_SIZE = 32;
/** First entry starts at byte 64 — derived: header + bitmap */
export const FIRST_ENTRY_OFFSET = PAGE_HEADER_SIZE + BITMAP_SIZE;
/** Each entry is 32 bytes */
export const ENTRY_SIZE = 32;
/** 126 entries per page — derived instead of hard-coded: (4096 - 64) / 32 */
export const ENTRIES_PER_PAGE = (PAGE_SIZE - FIRST_ENTRY_OFFSET) / ENTRY_SIZE;
/** Maximum key length (excluding null terminator) */
export const MAX_KEY_LENGTH = 15;
/** Key field size in entry — derived: key + null terminator */
export const KEY_FIELD_SIZE = MAX_KEY_LENGTH + 1;
/** Data field size in entry */
export const DATA_FIELD_SIZE = 8;
/** Maximum string length including null terminator */
export const MAX_STRING_LENGTH = 4000;
/** Maximum blob data size V1 (single page, legacy) */
export const MAX_BLOB_SIZE_V1 = 1984;
/** Maximum blob data size V2 (multi-page) */
export const MAX_BLOB_SIZE_V2 = 508000;
/** Chunk index value meaning "not applicable" */
export const CHUNK_ANY = 0xFF;
/** Minimum partition size: 3 pages (12KB) */
export const MIN_PARTITION_SIZE = 3 * PAGE_SIZE;
/** Maximum number of namespaces per partition */
export const MAX_NAMESPACES = 254;

56
lib/nvs/index.ts Normal file
View File

@ -0,0 +1,56 @@
// Public barrel for the NVS toolkit: types, constants, the binary and CSV
// codecs, and the immutable partition-manipulation helpers.
// Types and interfaces
export type {
  NvsEntry,
  NvsPartition,
  NvsFlashStats,
  NvsCsvRow,
  NvsEncoding,
} from './types';
export {
  NvsType,
  NvsVersion,
  PageState,
  EntryState,
  TYPE_TO_ENCODING,
  ENCODING_TO_TYPE,
  ENCODING_OPTIONS,
  isPrimitiveType,
  isVariableLengthType,
  primitiveSize,
} from './types';
// Constants (page/entry layout, size limits)
export {
  PAGE_SIZE, PAGE_HEADER_SIZE, BITMAP_OFFSET, BITMAP_SIZE,
  FIRST_ENTRY_OFFSET, ENTRY_SIZE, ENTRIES_PER_PAGE,
  MAX_KEY_LENGTH, KEY_FIELD_SIZE, DATA_FIELD_SIZE,
  MAX_STRING_LENGTH, MAX_BLOB_SIZE_V1, MAX_BLOB_SIZE_V2,
  CHUNK_ANY, MIN_PARTITION_SIZE, MAX_NAMESPACES,
} from './constants';
// CRC32 utility (re-exported from shared for backward compatibility)
export { crc32 } from '../shared/crc32';
// Binary operations
export { parseBinary } from './nvs-binary-parser';
export { serializeBinary } from './nvs-binary-serializer';
// CSV operations
export { parseCsv } from './nvs-csv-parser';
export { serializeCsv } from './nvs-csv-serializer';
// Partition manipulation
export {
  createEmptyPartition,
  addEntry,
  removeEntry,
  updateEntry,
  duplicateEntry,
  mergePartitions,
  entrySpan,
  calculateFlashStats,
  validatePartition,
  sortEntries,
  generateEntryId,
} from './nvs-partition';

View File

@ -0,0 +1,374 @@
import { NvsPartition, NvsEntry, NvsType, NvsVersion, PageState, EntryState } from './types';
import {
PAGE_SIZE, PAGE_HEADER_SIZE, BITMAP_OFFSET, BITMAP_SIZE,
FIRST_ENTRY_OFFSET, ENTRY_SIZE, ENTRIES_PER_PAGE, KEY_FIELD_SIZE,
} from './constants';
import { crc32 } from '../shared/crc32';
import {
readU8, readU16, readU32, readI8, readI16, readI32, readU64, readI64,
readNullTermString,
} from '../shared/binary-reader';
import { generateEntryId } from './nvs-partition';
// ── Entry state bitmap ─────────────────────────────────────────────
/**
 * Read one entry's 2-bit state from the page bitmap.
 * States are packed four per byte, LSB-first within each byte.
 */
function getEntryState(bitmap: Uint8Array, index: number): EntryState {
  // index * 2 bits total → byte = index / 4, shift = (index % 4) * 2.
  const stateByte = bitmap[index >> 2];
  const shift = (index & 0x3) * 2;
  return ((stateByte >> shift) & 0x3) as EntryState;
}
// ── Entry CRC verification ─────────────────────────────────────────
/** Entry CRC is over bytes [0..3] + [8..31], skipping the CRC field [4..7] */
function computeEntryCrc(entryBytes: Uint8Array): number {
  // 28 bytes total: 4 header bytes + 16 key bytes + 8 data bytes.
  const crcData = new Uint8Array(28);
  crcData.set(entryBytes.subarray(0, 4), 0); // nsIndex, type, span, chunkIndex
  crcData.set(entryBytes.subarray(8, 32), 4); // key[16] + data[8]
  return crc32(crcData);
}
// ── Page header CRC ────────────────────────────────────────────────
/** Page header CRC is over bytes [4..27] inclusive (seqNum, version, reserved) */
function computePageHeaderCrc(page: Uint8Array): number {
  return crc32(page.subarray(4, 28));
}
// ── Raw parsed structures ──────────────────────────────────────────
/** One decoded 32-byte entry slot, before namespace resolution. */
interface RawEntry {
  nsIndex: number;   // 0 = namespace-definition entry, >0 = data entry
  type: number;
  span: number;      // number of consecutive 32-byte slots this entry occupies
  chunkIndex: number; // chunk number for V2 BLOB_DATA; 0xFF otherwise
  crc: number;
  key: string;
  data: Uint8Array; // 8 bytes
  // Additional data for multi-span entries
  extraData: Uint8Array | null;
}
/** One page with a validated header and its decoded entry slots. */
interface ParsedPage {
  state: PageState;
  seqNumber: number;
  version: number;
  entries: (RawEntry | null)[]; // null = EMPTY or ERASED
}
// ── Main parser ────────────────────────────────────────────────────
/**
 * Parse an NVS binary partition into NvsPartition.
 *
 * Four phases:
 *  1. Decode each page (skipping EMPTY and CRC-corrupted pages).
 *  2. Build the nsIndex → name map from namespace-definition entries.
 *  3. Resolve data entries with last-write-wins deduplication
 *     (pages are processed in ascending sequence-number order).
 *  4. Reassemble V2 multi-chunk blobs from BLOB_IDX + BLOB_DATA.
 *
 * Corrupted pages/entries are skipped silently rather than failing the
 * whole parse — mirroring flash-tolerant reader behavior.
 *
 * @param data Raw binary data (must be multiple of 4096 bytes)
 */
export function parseBinary(data: Uint8Array): NvsPartition {
  if (data.length % PAGE_SIZE !== 0) {
    throw new Error(`二进制数据大小 (${data.length}) 不是页大小 (${PAGE_SIZE}) 的倍数`);
  }
  if (data.length === 0) {
    throw new Error('二进制数据为空');
  }
  const pageCount = data.length / PAGE_SIZE;
  const pages: ParsedPage[] = [];
  // ── Phase 1: Parse all pages ──
  for (let p = 0; p < pageCount; p++) {
    const pageOff = p * PAGE_SIZE;
    const pageData = data.subarray(pageOff, pageOff + PAGE_SIZE);
    // Read page header
    const state = readU32(pageData, 0) as PageState;
    const seqNumber = readU32(pageData, 4);
    const version = readU8(pageData, 8);
    const storedCrc = readU32(pageData, 28);
    // Skip EMPTY pages
    if (state === PageState.EMPTY) continue;
    // Verify page header CRC
    const calcCrc = computePageHeaderCrc(pageData);
    if (calcCrc !== storedCrc) {
      // Corrupted page, skip
      continue;
    }
    // Parse bitmap
    const bitmap = pageData.subarray(BITMAP_OFFSET, BITMAP_OFFSET + BITMAP_SIZE);
    // Parse entries
    const rawEntries: (RawEntry | null)[] = [];
    let entryIdx = 0;
    while (entryIdx < ENTRIES_PER_PAGE) {
      const entState = getEntryState(bitmap, entryIdx);
      if (entState === EntryState.EMPTY) {
        // All remaining entries are EMPTY
        break;
      }
      if (entState === EntryState.ERASED) {
        rawEntries.push(null);
        entryIdx++;
        continue;
      }
      // WRITTEN entry
      const entOff = FIRST_ENTRY_OFFSET + entryIdx * ENTRY_SIZE;
      const entryBytes = pageData.subarray(entOff, entOff + ENTRY_SIZE);
      const nsIndex = readU8(entryBytes, 0);
      const type = readU8(entryBytes, 1);
      const span = readU8(entryBytes, 2);
      const chunkIndex = readU8(entryBytes, 3);
      const entryCrc = readU32(entryBytes, 4);
      const key = readNullTermString(entryBytes, 8, KEY_FIELD_SIZE);
      const entryData = new Uint8Array(entryBytes.subarray(24, 32));
      // Reject nonsensical spans before CRC check
      if (span < 1 || entryIdx + span > ENTRIES_PER_PAGE) {
        entryIdx++; // skip this entry slot
        continue;
      }
      // Verify entry CRC
      const calcEntryCrc = computeEntryCrc(entryBytes);
      if (calcEntryCrc !== entryCrc) {
        // Corrupted entry, skip the span
        entryIdx += span;
        continue;
      }
      // Collect extra data for multi-span entries (SZ, BLOB, BLOB_DATA).
      // Payload slots immediately follow the header slot on the same page.
      let extraData: Uint8Array | null = null;
      if (span > 1) {
        const extraLen = (span - 1) * ENTRY_SIZE;
        const extraOff = FIRST_ENTRY_OFFSET + (entryIdx + 1) * ENTRY_SIZE;
        if (extraOff + extraLen <= PAGE_SIZE) {
          extraData = new Uint8Array(pageData.subarray(extraOff, extraOff + extraLen));
        }
      }
      rawEntries.push({ nsIndex, type, span, chunkIndex, crc: entryCrc, key, data: entryData, extraData });
      // Skip past the span (span >= 1 is guaranteed above)
      entryIdx += span;
    }
    pages.push({ state, seqNumber, version, entries: rawEntries });
  }
  // Sort pages by sequence number (ascending) for proper deduplication
  pages.sort((a, b) => a.seqNumber - b.seqNumber);
  // Detect version from first valid page
  const detectedVersion: NvsVersion = pages.length > 0 && pages[0].version === NvsVersion.V2
    ? NvsVersion.V2
    : NvsVersion.V1;
  // ── Phase 2: Build namespace map ──
  const nsMap = new Map<number, string>(); // nsIndex → namespace name
  const namespaces: string[] = [];
  for (const page of pages) {
    for (const entry of page.entries) {
      if (!entry) continue;
      // Namespace definitions have nsIndex=0 and type=U8
      if (entry.nsIndex === 0 && entry.type === NvsType.U8) {
        const assignedIdx = readU8(entry.data, 0);
        nsMap.set(assignedIdx, entry.key);
        if (!namespaces.includes(entry.key)) {
          namespaces.push(entry.key);
        }
      }
    }
  }
  // ── Phase 3: Resolve data entries (deduplication by last-write-wins) ──
  // For V2 blobs, we need to collect BLOB_DATA and BLOB_IDX separately.
  // Keys use \x00 as separator (NVS key/namespace names are C strings and cannot contain null bytes).
  const blobDataChunks = new Map<string, Map<number, Uint8Array>>(); // "ns\x00key" → chunkIndex → data
  const blobIdxEntries = new Map<string, { size: number; chunkCount: number; chunkStart: number }>();
  const entryMap = new Map<string, NvsEntry>(); // "ns\x00key" → NvsEntry (last wins)
  for (const page of pages) {
    for (const entry of page.entries) {
      if (!entry) continue;
      if (entry.nsIndex === 0) continue; // Skip namespace definitions
      const nsName = nsMap.get(entry.nsIndex);
      if (!nsName) continue; // Unknown namespace, skip
      const compositeKey = `${nsName}\x00${entry.key}`;
      switch (entry.type) {
        // Primitive types: the value lives entirely in the 8-byte data field.
        case NvsType.U8:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.U8, value: readU8(entry.data, 0),
          });
          break;
        case NvsType.I8:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.I8, value: readI8(entry.data, 0),
          });
          break;
        case NvsType.U16:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.U16, value: readU16(entry.data, 0),
          });
          break;
        case NvsType.I16:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.I16, value: readI16(entry.data, 0),
          });
          break;
        case NvsType.U32:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.U32, value: readU32(entry.data, 0),
          });
          break;
        case NvsType.I32:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.I32, value: readI32(entry.data, 0),
          });
          break;
        case NvsType.U64:
          // 64-bit values are surfaced as bigint (readU64/readI64).
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.U64, value: readU64(entry.data, 0),
          });
          break;
        case NvsType.I64:
          entryMap.set(compositeKey, {
            id: generateEntryId(), namespace: nsName, key: entry.key,
            type: NvsType.I64, value: readI64(entry.data, 0),
          });
          break;
        case NvsType.SZ: {
          // String: size at data[0..1], dataCrc at data[4..7]
          const size = readU16(entry.data, 0);
          if (entry.extraData && size > 0) {
            const payload = entry.extraData.subarray(0, size);
            const storedDataCrc = readU32(entry.data, 4);
            if (crc32(payload) !== storedDataCrc) break; // corrupted payload, skip
            // Decode string (remove null terminator)
            const str = new TextDecoder('utf-8').decode(payload.subarray(0, size - 1));
            entryMap.set(compositeKey, {
              id: generateEntryId(), namespace: nsName, key: entry.key,
              type: NvsType.SZ, value: str,
            });
          }
          break;
        }
        case NvsType.BLOB: {
          // Legacy V1 blob: size at data[0..1], dataCrc at data[4..7]
          const size = readU16(entry.data, 0);
          if (entry.extraData && size > 0) {
            const payload = entry.extraData.subarray(0, size);
            const storedDataCrc = readU32(entry.data, 4);
            if (crc32(payload) !== storedDataCrc) break; // corrupted payload, skip
            entryMap.set(compositeKey, {
              id: generateEntryId(), namespace: nsName, key: entry.key,
              type: NvsType.BLOB, value: new Uint8Array(payload),
            });
          } else {
            // size == 0 (or missing payload slots): treat as an empty blob.
            entryMap.set(compositeKey, {
              id: generateEntryId(), namespace: nsName, key: entry.key,
              type: NvsType.BLOB, value: new Uint8Array(0),
            });
          }
          break;
        }
        case NvsType.BLOB_DATA: {
          // V2 blob data chunk — stashed for Phase 4 reassembly.
          const size = readU16(entry.data, 0);
          if (!blobDataChunks.has(compositeKey)) {
            blobDataChunks.set(compositeKey, new Map());
          }
          if (entry.extraData && size > 0) {
            const payload = entry.extraData.subarray(0, size);
            const storedDataCrc = readU32(entry.data, 4);
            if (crc32(payload) !== storedDataCrc) break; // corrupted chunk, skip
            blobDataChunks.get(compositeKey)!.set(entry.chunkIndex, new Uint8Array(payload));
          } else {
            blobDataChunks.get(compositeKey)!.set(entry.chunkIndex, new Uint8Array(0));
          }
          break;
        }
        case NvsType.BLOB_IDX: {
          // V2 blob index: total size (u32), chunk count (u8), first chunk index (u8).
          const totalSize = readU32(entry.data, 0);
          const chunkCount = readU8(entry.data, 4);
          const chunkStart = readU8(entry.data, 5);
          blobIdxEntries.set(compositeKey, { size: totalSize, chunkCount, chunkStart });
          break;
        }
      }
    }
  }
  // ── Phase 4: Reassemble V2 blobs ──
  // Concatenate chunks [chunkStart .. chunkStart + chunkCount) in order.
  for (const [compositeKey, idxInfo] of blobIdxEntries) {
    const chunks = blobDataChunks.get(compositeKey);
    if (!chunks) continue;
    const assembled = new Uint8Array(idxInfo.size);
    let offset = 0;
    let chunksValid = true;
    for (let i = idxInfo.chunkStart; i < idxInfo.chunkStart + idxInfo.chunkCount; i++) {
      const chunk = chunks.get(i);
      if (!chunk) {
        chunksValid = false; // missing chunk — cannot reassemble correctly
        break;
      }
      assembled.set(chunk, offset);
      offset += chunk.length;
    }
    if (!chunksValid) continue; // skip blob with missing chunks rather than return corrupted data
    if (offset !== idxInfo.size) continue; // chunk sizes don't match declared total — zero tail would result
    // Split the composite key back into namespace and key at the \x00 separator.
    const sepIdx = compositeKey.indexOf('\x00');
    const nsName = compositeKey.substring(0, sepIdx);
    const key = compositeKey.substring(sepIdx + 1);
    entryMap.set(compositeKey, {
      id: generateEntryId(),
      namespace: nsName,
      key,
      type: NvsType.BLOB_DATA,
      value: assembled,
    });
  }
  return {
    entries: Array.from(entryMap.values()),
    namespaces,
    version: detectedVersion,
  };
}

View File

@ -0,0 +1,344 @@
import { NvsPartition, NvsType, NvsVersion, PageState } from './types';
import {
PAGE_SIZE, PAGE_HEADER_SIZE, BITMAP_OFFSET, BITMAP_SIZE,
FIRST_ENTRY_OFFSET, ENTRY_SIZE, ENTRIES_PER_PAGE,
KEY_FIELD_SIZE, MIN_PARTITION_SIZE,
} from './constants';
import { crc32 } from '../shared/crc32';
import {
writeU8, writeU16, writeU32, writeI8, writeI16, writeI32,
writeU64, writeI64, writeNullTermString,
} from '../shared/binary-writer';
// ── Entry CRC (over bytes [0..3] + [8..31], skipping CRC at [4..7]) ──
/** Compute the 32-byte entry CRC in-place over the buffer at entryOff. */
function computeEntryCrc(entryBuf: Uint8Array, entryOff: number): number {
  // 28 bytes total: 4 header bytes + 16 key bytes + 8 data bytes.
  const crcData = new Uint8Array(28);
  crcData.set(entryBuf.subarray(entryOff, entryOff + 4), 0);
  crcData.set(entryBuf.subarray(entryOff + 8, entryOff + 32), 4);
  return crc32(crcData);
}
// ── Page header CRC (over bytes [4..27] inclusive) ─────────────────
/** Must match the parser's computePageHeaderCrc (same 24-byte window). */
function computePageHeaderCrc(page: Uint8Array, pageOff: number): number {
  return crc32(page.subarray(pageOff + 4, pageOff + 28));
}
// ── Bitmap: set entry state to WRITTEN ─────────────────────────────
/**
 * Mark one entry slot as WRITTEN in the page's state bitmap.
 * Erased flash reads as all-ones (EMPTY = 0b11); clearing the low bit of
 * the entry's 2-bit pair yields WRITTEN = 0b10.
 */
function setEntryWritten(page: Uint8Array, pageOff: number, entryIndex: number) {
  const pairPos = entryIndex * 2;
  const targetByte = pageOff + BITMAP_OFFSET + (pairPos >> 3);
  page[targetByte] &= ~(1 << (pairPos & 7));
}
// ── Planned entry to write ─────────────────────────────────────────
/**
 * A fully-resolved entry queued for layout into pages. Exactly one of
 * primitiveData / payload / blobIdxData is populated, depending on type.
 */
interface PlannedEntry {
  nsIndex: number;    // 0 for namespace-definition entries
  type: NvsType;
  chunkIndex: number; // 0xFF unless a V2 BLOB_DATA chunk
  key: string;
  span: number;       // header slot + payload slots
  // For primitive types: the raw 8-byte data field
  primitiveData?: (buf: Uint8Array, off: number) => void;
  // For variable-length types: the raw payload bytes
  payload?: Uint8Array;
  payloadSize?: number;
  payloadCrc?: number;
  // For BLOB_IDX
  blobIdxData?: { totalSize: number; chunkCount: number; chunkStart: number };
}
/**
 * Serialize an NvsPartition to the ESP-IDF NVS binary flash image format.
 *
 * The image is a sequence of PAGE_SIZE pages, each with a 32-byte header
 * (state, sequence number, version, CRC), a 32-byte entry-state bitmap, and
 * ENTRIES_PER_PAGE 32-byte entry slots. Namespace-definition entries
 * (nsIndex 0, type U8) are emitted first, followed by data entries. An
 * entry (header + payload slots) never crosses a page boundary.
 *
 * @param partition The partition data to serialize
 * @param targetSize Target binary size in bytes (must be multiple of 4096, >= 12288)
 * @returns The flash image; unused space is left as 0xFF (erased state)
 * @throws Error if the size constraints are violated, the namespace count
 *         exceeds the one-byte index space, a single entry can never fit
 *         in one page, or the planned entries do not fit in targetSize
 */
export function serializeBinary(partition: NvsPartition, targetSize: number): Uint8Array {
  if (targetSize % PAGE_SIZE !== 0) {
    throw new Error(`目标大小 (${targetSize}) 不是页大小 (${PAGE_SIZE}) 的倍数`);
  }
  if (targetSize < MIN_PARTITION_SIZE) {
    throw new Error(`目标大小 (${targetSize}) 小于最小分区大小 (${MIN_PARTITION_SIZE})`);
  }
  // Allocate buffer filled with 0xFF (erased flash state); anything not
  // explicitly written below keeps that value.
  const buf = new Uint8Array(targetSize);
  buf.fill(0xFF);
  // ── Step 1: Assign namespace indices ──
  // The namespace index is stored in a single byte and index 0 is reserved
  // for the namespace-definition entries themselves, so at most 254
  // namespaces are addressable. Without this guard writeU8 would silently
  // truncate larger indices and corrupt the image.
  if (partition.namespaces.length > 254) {
    throw new Error(`命名空间数量 (${partition.namespaces.length}) 超过单字节索引上限 254`);
  }
  const nsToIndex = new Map<string, number>();
  let nextNsIdx = 1;
  for (const ns of partition.namespaces) {
    nsToIndex.set(ns, nextNsIdx++);
  }
  // ── Step 2: Plan all entries ──
  const planned: PlannedEntry[] = [];
  // Namespace definition entries: nsIndex 0, U8 value = assigned index
  for (const [ns, idx] of nsToIndex) {
    planned.push({
      nsIndex: 0,
      type: NvsType.U8,
      chunkIndex: 0xFF,
      key: ns,
      span: 1,
      primitiveData: (b, o) => writeU8(b, o, idx),
    });
  }
  // Data entries
  for (const entry of partition.entries) {
    const nsIdx = nsToIndex.get(entry.namespace);
    // Entries in an unregistered namespace are silently skipped (mirrors
    // the original behaviour; validatePartition reports them separately).
    if (nsIdx === undefined) continue;
    // Plan a single-slot primitive entry with the given data-field writer.
    const pushPrimitive = (write: (b: Uint8Array, o: number) => void) => {
      planned.push({
        nsIndex: nsIdx, type: entry.type, chunkIndex: 0xFF,
        key: entry.key, span: 1,
        primitiveData: write,
      });
    };
    switch (entry.type) {
      case NvsType.U8:
        pushPrimitive((b, o) => writeU8(b, o, entry.value as number));
        break;
      case NvsType.I8:
        pushPrimitive((b, o) => writeI8(b, o, entry.value as number));
        break;
      case NvsType.U16:
        pushPrimitive((b, o) => writeU16(b, o, entry.value as number));
        break;
      case NvsType.I16:
        pushPrimitive((b, o) => writeI16(b, o, entry.value as number));
        break;
      case NvsType.U32:
        pushPrimitive((b, o) => writeU32(b, o, entry.value as number));
        break;
      case NvsType.I32:
        pushPrimitive((b, o) => writeI32(b, o, entry.value as number));
        break;
      case NvsType.U64:
        pushPrimitive((b, o) => writeU64(b, o, entry.value as bigint));
        break;
      case NvsType.I64:
        pushPrimitive((b, o) => writeI64(b, o, entry.value as bigint));
        break;
      case NvsType.SZ: {
        const strBytes = new TextEncoder().encode(entry.value as string);
        // +1 for null terminator
        const payload = new Uint8Array(strBytes.length + 1);
        payload.set(strBytes);
        payload[strBytes.length] = 0;
        const span = 1 + Math.ceil(payload.length / ENTRY_SIZE);
        planned.push({
          nsIndex: nsIdx, type: NvsType.SZ, chunkIndex: 0xFF,
          key: entry.key, span,
          payload,
          payloadSize: payload.length,
          payloadCrc: crc32(payload),
        });
        break;
      }
      case NvsType.BLOB: {
        // Legacy V1 blob (single-page)
        const blobData = entry.value as Uint8Array;
        const span = 1 + Math.ceil(blobData.length / ENTRY_SIZE);
        planned.push({
          nsIndex: nsIdx, type: NvsType.BLOB, chunkIndex: 0xFF,
          key: entry.key, span,
          payload: blobData,
          payloadSize: blobData.length,
          payloadCrc: crc32(blobData),
        });
        break;
      }
      case NvsType.BLOB_DATA: {
        // V2 multi-page blob: split into chunks that fit in a single page
        const blobData = entry.value as Uint8Array;
        const maxChunkPayload = (ENTRIES_PER_PAGE - 1) * ENTRY_SIZE;
        const chunkCount = Math.max(1, Math.ceil(blobData.length / maxChunkPayload));
        let dataOffset = 0;
        for (let ci = 0; ci < chunkCount; ci++) {
          const chunkEnd = Math.min(dataOffset + maxChunkPayload, blobData.length);
          const chunkData = blobData.subarray(dataOffset, chunkEnd);
          const chunkSpan = 1 + Math.ceil(chunkData.length / ENTRY_SIZE);
          planned.push({
            nsIndex: nsIdx, type: NvsType.BLOB_DATA, chunkIndex: ci,
            key: entry.key, span: chunkSpan,
            payload: new Uint8Array(chunkData),
            payloadSize: chunkData.length,
            payloadCrc: crc32(chunkData),
          });
          dataOffset = chunkEnd;
        }
        // BLOB_IDX entry records the reassembly metadata
        planned.push({
          nsIndex: nsIdx, type: NvsType.BLOB_IDX, chunkIndex: 0xFF,
          key: entry.key, span: 1,
          blobIdxData: { totalSize: blobData.length, chunkCount, chunkStart: 0 },
        });
        break;
      }
    }
  }
  // ── Step 3: Write entries into pages ──
  const totalPages = targetSize / PAGE_SIZE;
  let currentPage = 0;
  let seqNumber = 0;
  let plannedIdx = 0;
  while (plannedIdx < planned.length && currentPage < totalPages) {
    const pageOff = currentPage * PAGE_SIZE;
    // Page header: sequence number and version. The state word is written
    // after the page is filled (it is outside the header-CRC range).
    writeU32(buf, pageOff + 4, seqNumber);
    writeU8(buf, pageOff + 8, partition.version);
    let currentEntryIdx = 0;
    while (plannedIdx < planned.length && currentEntryIdx < ENTRIES_PER_PAGE) {
      const pe = planned[plannedIdx];
      // An entry whose span exceeds a whole page can never be placed;
      // without this check such an entry would silently consume every
      // remaining page and surface as a misleading "out of space" error.
      if (pe.span > ENTRIES_PER_PAGE) {
        throw new Error(`条目 "${pe.key}" 需要 ${pe.span} 个槽位，超过单页上限 ${ENTRIES_PER_PAGE}`);
      }
      // Entries never span page boundaries; leave the tail slots unused.
      if (currentEntryIdx + pe.span > ENTRIES_PER_PAGE) {
        break;
      }
      const entOff = pageOff + FIRST_ENTRY_OFFSET + currentEntryIdx * ENTRY_SIZE;
      // Zero the whole span so reserved bytes and key padding read as 0x00.
      buf.fill(0x00, entOff, entOff + pe.span * ENTRY_SIZE);
      // 32-byte entry header
      writeU8(buf, entOff + 0, pe.nsIndex);
      writeU8(buf, entOff + 1, pe.type);
      writeU8(buf, entOff + 2, pe.span);
      writeU8(buf, entOff + 3, pe.chunkIndex);
      // CRC at [4..7] is filled in below, once key and data are in place
      writeNullTermString(buf, entOff + 8, pe.key, KEY_FIELD_SIZE);
      // Data field (8 bytes at offset 24)
      if (pe.primitiveData) {
        // Primitive value lives directly in the data field
        pe.primitiveData(buf, entOff + 24);
      } else if (pe.payload !== undefined && pe.payloadSize !== undefined) {
        // Variable-length: size at [24..25], reserved [26..27] stay 0,
        // payload CRC at [28..31], payload bytes in the following slots
        writeU16(buf, entOff + 24, pe.payloadSize);
        writeU32(buf, entOff + 28, pe.payloadCrc!);
        buf.set(pe.payload.subarray(0, (pe.span - 1) * ENTRY_SIZE), entOff + ENTRY_SIZE);
      } else if (pe.blobIdxData) {
        // BLOB_IDX: totalSize[24..27], chunkCount[28], chunkStart[29]
        writeU32(buf, entOff + 24, pe.blobIdxData.totalSize);
        writeU8(buf, entOff + 28, pe.blobIdxData.chunkCount);
        writeU8(buf, entOff + 29, pe.blobIdxData.chunkStart);
      }
      // Compute and write entry CRC now that key+data are final
      writeU32(buf, entOff + 4, computeEntryCrc(buf, entOff));
      // Mark every slot of the span as WRITTEN in the state bitmap
      for (let s = 0; s < pe.span; s++) {
        setEntryWritten(buf, pageOff, currentEntryIdx + s);
      }
      currentEntryIdx += pe.span;
      plannedIdx++;
    }
    // FULL when more entries remain, ACTIVE for the last written page. The
    // state word occupies bytes [0..3] and is not covered by the header
    // CRC, so it can be written before the CRC without a recompute.
    writeU32(buf, pageOff, plannedIdx < planned.length ? PageStateVal.FULL : PageStateVal.ACTIVE);
    writeU32(buf, pageOff + 28, computePageHeaderCrc(buf, pageOff));
    currentPage++;
    seqNumber++;
  }
  if (plannedIdx < planned.length) {
    throw new Error(
      `分区空间不足: 还有 ${planned.length - plannedIdx} 个条目无法写入。` +
      `请增大分区大小。`
    );
  }
  return buf;
}
/**
 * Numeric values for page states (matching PageState enum but as regular
 * numbers for writeU32). Kept in sync with the enum by hand — update both
 * together if the format ever changes.
 */
const PageStateVal = {
  EMPTY: 0xFFFFFFFF,
  ACTIVE: 0xFFFFFFFE,
  FULL: 0xFFFFFFFC,
  FREEING: 0xFFFFFFFA,
} as const;

232
lib/nvs/nvs-csv-parser.ts Normal file
View File

@ -0,0 +1,232 @@
import { NvsPartition, NvsEntry, NvsType, NvsVersion, NvsEncoding, ENCODING_TO_TYPE } from './types';
import { generateEntryId } from './nvs-partition';
/**
 * Split one CSV line into fields, honoring double-quoted fields.
 * Unquoted fields are trimmed; quoted fields keep their whitespace and may
 * contain commas and escaped quotes ("" becomes ").
 */
function splitCsvLine(line: string): string[] {
  const fields: string[] = [];
  let buffer = '';
  let insideQuotes = false;
  let sawQuotes = false;
  const flush = () => {
    fields.push(sawQuotes ? buffer : buffer.trim());
    buffer = '';
    sawQuotes = false;
  };
  for (let pos = 0; pos < line.length; pos++) {
    const c = line[pos];
    if (c === '"') {
      if (insideQuotes && line[pos + 1] === '"') {
        buffer += '"';
        pos++; // consume the second quote of the escape pair
      } else {
        insideQuotes = !insideQuotes;
        if (insideQuotes) sawQuotes = true;
      }
    } else if (c === ',' && !insideQuotes) {
      flush();
    } else {
      buffer += c;
    }
  }
  flush();
  return fields;
}
/**
 * Parse an integer value from CSV, supporting decimal and 0x hex with an
 * optional leading minus (e.g. "-0x1A"). Rejects partial matches like "12abc".
 *
 * Fix: the previous `startsWith('0x')` check never matched negative hex, so
 * "-0x1A" fell through to the decimal branch and was wrongly rejected even
 * though the validation regex allows it (and parseBigIntValue accepts it).
 * @throws Error on any string that is not a complete decimal/hex integer
 */
function parseIntValue(str: string): number {
  str = str.trim();
  let val: number;
  if (/^-?0[xX]/.test(str)) {
    if (!/^-?0[xX][0-9a-fA-F]+$/.test(str)) {
      throw new Error(`无效的整数值: "${str}"`);
    }
    // parseInt with radix 16 understands both the sign and the 0x prefix
    val = parseInt(str, 16);
  } else {
    if (!/^-?\d+$/.test(str)) {
      throw new Error(`无效的整数值: "${str}"`);
    }
    val = parseInt(str, 10);
  }
  // Defensive: the regexes above should make NaN impossible
  if (Number.isNaN(val)) {
    throw new Error(`无效的整数值: "${str}"`);
  }
  return val;
}
/**
 * Parse a 64-bit integer from CSV text. Accepts decimal and 0x hex,
 * including negative hex such as "-0x1A".
 */
function parseBigIntValue(str: string): bigint {
  str = str.trim();
  // BigInt() handles decimal and positive hex but throws on "-0x…", so
  // negative hex is split into sign and magnitude first.
  if (/^-0[xX]/.test(str)) {
    if (!/^-0[xX][0-9a-fA-F]+$/.test(str)) {
      throw new Error(`无效的整数值: "${str}"`);
    }
    return -BigInt(str.slice(1));
  }
  try {
    return BigInt(str);
  } catch {
    throw new Error(`无效的整数值: "${str}"`);
  }
}
/**
 * Decode a hex string (e.g. "48656c6c6f") to a Uint8Array. Whitespace is
 * ignored.
 *
 * Fix: previously an odd-length string crashed with an opaque RangeError
 * (non-integer Uint8Array length) and non-hex characters silently decoded
 * to garbage bytes; both now throw a clear error.
 * @throws Error on odd length or non-hex characters
 */
function hexToBytes(hex: string): Uint8Array {
  hex = hex.replace(/\s/g, '');
  if (hex.length % 2 !== 0 || !/^[0-9a-fA-F]*$/.test(hex)) {
    throw new Error(`无效的十六进制字符串: "${hex}"`);
  }
  const bytes = new Uint8Array(hex.length / 2);
  for (let i = 0; i < bytes.length; i++) {
    bytes[i] = parseInt(hex.substring(i * 2, i * 2 + 2), 16);
  }
  return bytes;
}
/**
 * Attempt to decode base64 text into bytes.
 * Returns null when the input does not look like base64 (illegal
 * characters) or when decoding fails.
 */
function tryBase64Decode(str: string): Uint8Array | null {
  const candidate = str.trim();
  if (!/^[A-Za-z0-9+/=]+$/.test(candidate)) return null;
  try {
    const decoded = atob(candidate);
    const out = new Uint8Array(decoded.length);
    for (let i = 0; i < decoded.length; i++) {
      out[i] = decoded.charCodeAt(i);
    }
    return out;
  } catch {
    return null;
  }
}
/**
* Parse ESP-IDF NVS CSV format into NvsPartition.
*
* CSV format:
* key,type,encoding,value
* namespace_name,namespace,,
* wifi_ssid,data,string,MyNetwork
* boot_count,data,u8,0
*/
/**
 * Split CSV text into logical lines. Double-quoted fields may contain
 * newlines, so line breaks inside quotes do not terminate the row
 * (RFC 4180 multiline support). Escaped quote pairs ("") are passed
 * through verbatim for splitCsvLine to resolve later.
 */
function splitCsvLines(text: string): string[] {
  const rows: string[] = [];
  let row = '';
  let quoted = false;
  for (let i = 0; i < text.length; i++) {
    const ch = text[i];
    if (ch === '"') {
      if (quoted && text[i + 1] === '"') {
        row += '""'; // keep the escape pair intact
        i++;
      } else {
        quoted = !quoted;
        row += ch;
      }
    } else if (!quoted && (ch === '\n' || (ch === '\r' && text[i + 1] === '\n'))) {
      if (ch === '\r') i++; // swallow the LF of a CRLF pair
      rows.push(row);
      row = '';
    } else {
      row += ch;
    }
  }
  if (row) rows.push(row);
  return rows;
}
/**
 * Parse ESP-IDF NVS CSV text into an NvsPartition (always tagged V2).
 * A `name,namespace,,` row switches the current namespace; a
 * `key,data,<encoding>,<value>` row adds an entry. `file` rows are
 * accepted, but file paths cannot be read in the browser, so blob file
 * values are left empty for the UI to fill in.
 * @throws Error (message prefixed with a line number) on structurally
 *         invalid rows
 */
export function parseCsv(text: string): NvsPartition {
  // Logical lines: quoted fields may span multiple physical lines
  const lines = splitCsvLines(text);
  const entries: NvsEntry[] = [];
  const namespaces: string[] = [];
  let currentNamespace = '';
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();
    // Skip blank lines and # comments
    if (!line || line.startsWith('#')) continue;
    const fields = splitCsvLine(line);
    // Rows with fewer than two fields are silently ignored
    if (fields.length < 2) continue;
    // Skip header line
    if (fields[0] === 'key' && fields[1] === 'type') continue;
    const key = fields[0];
    const type = fields[1];
    const encoding = (fields[2] || '').toLowerCase() as NvsEncoding | '';
    const value = fields[3] || '';
    // A namespace row applies to all following data rows
    if (type === 'namespace') {
      currentNamespace = key;
      if (!namespaces.includes(key)) {
        namespaces.push(key);
      }
      continue;
    }
    if (!currentNamespace) {
      // NOTE(review): error positions are logical-line indices; they can
      // drift from physical line numbers when quoted fields span lines.
      throw new Error(`${i + 1}: 数据条目 "${key}" 出现在任何命名空间之前`);
    }
    if (type !== 'data' && type !== 'file') {
      throw new Error(`${i + 1}: 未知类型 "${type}"`);
    }
    if (!encoding) {
      throw new Error(`${i + 1}: 键 "${key}" 缺少编码类型`);
    }
    const nvsType = ENCODING_TO_TYPE[encoding as NvsEncoding];
    if (nvsType === undefined) {
      throw new Error(`${i + 1}: 未知编码 "${encoding}"`);
    }
    let parsedValue: number | bigint | string | Uint8Array;
    switch (encoding) {
      case 'u8': case 'u16': case 'u32':
      case 'i8': case 'i16': case 'i32':
        parsedValue = parseIntValue(value);
        break;
      case 'u64': case 'i64':
        parsedValue = parseBigIntValue(value);
        break;
      case 'string':
        parsedValue = value;
        break;
      case 'blob':
      case 'binary': {
        if (type === 'file') {
          // In browser context, file paths can't be resolved.
          // Store an empty Uint8Array — the UI should handle file picking.
          parsedValue = new Uint8Array(0);
        } else {
          // Value interpretation order: even-length hex, then base64, then
          // raw UTF-8 bytes of the literal text as a last resort.
          const hexClean = value.replace(/\s/g, '');
          if (/^[0-9a-fA-F]+$/.test(hexClean) && hexClean.length % 2 === 0 && hexClean.length > 0) {
            parsedValue = hexToBytes(value);
          } else {
            const b64 = tryBase64Decode(value);
            parsedValue = b64 ?? new TextEncoder().encode(value);
          }
        }
        break;
      }
      default:
        parsedValue = value;
    }
    entries.push({
      id: generateEntryId(),
      namespace: currentNamespace,
      key,
      type: nvsType,
      value: parsedValue,
    });
  }
  return { entries, namespaces, version: NvsVersion.V2 };
}

View File

@ -0,0 +1,63 @@
import { NvsPartition, NvsType, TYPE_TO_ENCODING, isPrimitiveType } from './types';
/** Render bytes as a lowercase hex string, two digits per byte. */
function bytesToHex(data: Uint8Array): string {
  let hex = '';
  for (const byte of data) {
    hex += byte.toString(16).padStart(2, '0');
  }
  return hex;
}
/**
 * Quote a CSV field when it contains a delimiter, quote, or line break,
 * doubling embedded quotes per RFC 4180.
 *
 * Fix: carriage returns are now treated like newlines — previously a field
 * containing a bare \r was emitted unquoted and could corrupt the row
 * structure for consumers that split on \r\n.
 */
function escapeCsvField(value: string): string {
  if (/[",\n\r]/.test(value)) {
    return '"' + value.replace(/"/g, '""') + '"';
  }
  return value;
}
/**
 * Serialize NvsPartition to ESP-IDF NVS CSV format.
 *
 * Output format:
 *   key,type,encoding,value
 *   namespace_name,namespace,,
 *   wifi_ssid,data,string,MyNetwork
 */
export function serializeCsv(partition: NvsPartition): string {
  const lines: string[] = ['key,type,encoding,value'];
  // Bucket entries per namespace, preserving declared namespace order and
  // appending any namespace that only appears on an entry.
  const byNamespace = new Map<string, typeof partition.entries>();
  for (const ns of partition.namespaces) byNamespace.set(ns, []);
  for (const entry of partition.entries) {
    const bucket = byNamespace.get(entry.namespace);
    if (bucket) {
      bucket.push(entry);
    } else {
      byNamespace.set(entry.namespace, [entry]);
    }
  }
  for (const [ns, nsEntries] of byNamespace) {
    // Namespace declaration line
    lines.push(`${escapeCsvField(ns)},namespace,,`);
    for (const entry of nsEntries) {
      const encoding = TYPE_TO_ENCODING[entry.type];
      let valueStr: string;
      if (isPrimitiveType(entry.type)) {
        valueStr = String(entry.value);
      } else if (entry.type === NvsType.SZ) {
        valueStr = escapeCsvField(entry.value as string);
      } else {
        // BLOB / BLOB_DATA payloads are hex-encoded
        valueStr = bytesToHex(entry.value as Uint8Array);
      }
      lines.push(`${escapeCsvField(entry.key)},data,${encoding},${valueStr}`);
    }
  }
  return lines.join('\n') + '\n';
}

278
lib/nvs/nvs-partition.ts Normal file
View File

@ -0,0 +1,278 @@
import {
NvsEntry, NvsPartition, NvsFlashStats, NvsType, NvsVersion,
isPrimitiveType,
} from './types';
import { ENTRIES_PER_PAGE, ENTRY_SIZE, PAGE_SIZE, MAX_KEY_LENGTH, MAX_NAMESPACES, MAX_STRING_LENGTH, MAX_BLOB_SIZE_V1, MAX_BLOB_SIZE_V2 } from './constants';
/**
 * Generate a client-side unique ID for entry tracking.
 * Combines a random base36 fragment with the current timestamp in base36;
 * not cryptographically strong — UI bookkeeping only, never serialized.
 */
export function generateEntryId(): string {
  const randomPart = Math.random().toString(36).slice(2);
  const timePart = Date.now().toString(36);
  return randomPart + timePart;
}
/** Build a fresh partition with no entries or namespaces (defaults to V2). */
export function createEmptyPartition(version: NvsVersion = NvsVersion.V2): NvsPartition {
  const empty: NvsPartition = { entries: [], namespaces: [], version };
  return empty;
}
/**
 * Add a new entry, registering its namespace if unseen.
 * Returns a new NvsPartition; the input partition is not mutated.
 */
export function addEntry(
  partition: NvsPartition,
  entry: Omit<NvsEntry, 'id'>,
): NvsPartition {
  const withId: NvsEntry = { ...entry, id: generateEntryId() };
  const nsKnown = partition.namespaces.includes(entry.namespace);
  return {
    ...partition,
    entries: [...partition.entries, withId],
    namespaces: nsKnown
      ? partition.namespaces
      : [...partition.namespaces, entry.namespace],
  };
}
/**
 * Remove the entry with the given ID, dropping any namespace left with no
 * remaining entries. Returns a new NvsPartition.
 */
export function removeEntry(partition: NvsPartition, entryId: string): NvsPartition {
  const remaining = partition.entries.filter(e => e.id !== entryId);
  const stillUsed = new Set(remaining.map(e => e.namespace));
  return {
    ...partition,
    entries: remaining,
    namespaces: partition.namespaces.filter(ns => stillUsed.has(ns)),
  };
}
/**
 * Apply partial updates to the entry with the given ID. A changed namespace
 * is registered, and namespaces left without entries are dropped.
 * Returns a new NvsPartition.
 */
export function updateEntry(
  partition: NvsPartition,
  entryId: string,
  updates: Partial<Omit<NvsEntry, 'id'>>,
): NvsPartition {
  const entries = partition.entries.map(e =>
    (e.id === entryId ? { ...e, ...updates } : e),
  );
  let namespaces = partition.namespaces;
  // Register a newly-introduced namespace before pruning unused ones
  if (updates.namespace && !namespaces.includes(updates.namespace)) {
    namespaces = [...namespaces, updates.namespace];
  }
  const used = new Set(entries.map(e => e.namespace));
  return { ...partition, entries, namespaces: namespaces.filter(ns => used.has(ns)) };
}
/**
 * Append a copy of the entry with the given ID (fresh ID, deep-copied blob
 * data). Returns the partition unchanged when the ID is unknown.
 */
export function duplicateEntry(partition: NvsPartition, entryId: string): NvsPartition {
  const original = partition.entries.find(e => e.id === entryId);
  if (!original) return partition;
  // Uint8Array values are copied so the clone is independent of the source
  const copiedValue = original.value instanceof Uint8Array
    ? new Uint8Array(original.value)
    : original.value;
  const clone: NvsEntry = { ...original, id: generateEntryId(), value: copiedValue };
  return { ...partition, entries: [...partition.entries, clone] };
}
/**
 * Merge source into target by (namespace, key) match.
 * @param mode 'overwrite' replaces matching entries; 'skip' keeps target's value
 */
export function mergePartitions(
  target: NvsPartition,
  source: NvsPartition,
  mode: 'overwrite' | 'skip' = 'overwrite',
): NvsPartition {
  const mergedEntries = [...target.entries];
  const mergedNamespaces = [...target.namespaces];
  for (const incoming of source.entries) {
    if (!mergedNamespaces.includes(incoming.namespace)) {
      mergedNamespaces.push(incoming.namespace);
    }
    const existingIdx = mergedEntries.findIndex(
      e => e.namespace === incoming.namespace && e.key === incoming.key,
    );
    if (existingIdx < 0) {
      mergedEntries.push({ ...incoming, id: generateEntryId() });
    } else if (mode === 'overwrite') {
      // Keep the target entry's stable ID so client-side tracking survives
      mergedEntries[existingIdx] = { ...incoming, id: mergedEntries[existingIdx].id };
    }
    // mode === 'skip': leave the target's entry untouched
  }
  return { ...target, entries: mergedEntries, namespaces: mergedNamespaces };
}
/**
 * Number of 32-byte entry slots a single NvsEntry occupies: one header slot
 * plus payload slots, and for V2 chunked blobs one header per chunk plus a
 * BLOB_IDX slot.
 */
export function entrySpan(entry: NvsEntry, version: NvsVersion): number {
  if (isPrimitiveType(entry.type)) return 1;
  if (entry.type === NvsType.SZ) {
    // String payload includes the trailing NUL byte
    const payloadLen = new TextEncoder().encode(entry.value as string).length + 1;
    return 1 + Math.ceil(payloadLen / ENTRY_SIZE);
  }
  const data = entry.value as Uint8Array;
  // Legacy single-page blob: one header slot plus payload slots
  if (version === NvsVersion.V1 || entry.type === NvsType.BLOB) {
    return 1 + Math.ceil(data.length / ENTRY_SIZE);
  }
  // V2 chunked blob: every chunk carries its own header slot, and one
  // additional BLOB_IDX slot ties the chunks together.
  const maxChunkPayload = (ENTRIES_PER_PAGE - 1) * ENTRY_SIZE;
  let span = 1; // BLOB_IDX
  let remaining = data.length;
  do {
    const chunkLen = Math.min(remaining, maxChunkPayload);
    span += 1 + Math.ceil(chunkLen / ENTRY_SIZE);
    remaining -= chunkLen;
  } while (remaining > 0);
  return span;
}
/**
 * Compute flash usage statistics for a partition at a target size.
 * Packing waste is modeled exactly like the serializer: entries never cross
 * a page boundary, so trailing slots are lost whenever the next entry does
 * not fit on the current page.
 */
export function calculateFlashStats(
  partition: NvsPartition,
  targetSizeBytes: number,
): NvsFlashStats {
  const totalPages = Math.floor(targetSizeBytes / PAGE_SIZE);
  // One page is reserved for garbage collection and never holds data
  const maxEntries = Math.max(totalPages - 1, 0) * ENTRIES_PER_PAGE;
  // Everything that must be stored: one slot per namespace definition,
  // then each data entry's span.
  const spans: number[] = partition.namespaces.map(() => 1)
    .concat(partition.entries.map(e => entrySpan(e, partition.version)));
  // Simulate page packing to count real slot consumption including waste
  let slotInPage = 0;
  let slotsConsumed = 0;
  for (const span of spans) {
    if (slotInPage + span > ENTRIES_PER_PAGE) {
      slotsConsumed += ENTRIES_PER_PAGE - slotInPage; // wasted tail slots
      slotInPage = 0;
    }
    slotsConsumed += span;
    slotInPage += span;
    if (slotInPage >= ENTRIES_PER_PAGE) slotInPage = 0;
  }
  const logicalEntries = spans.reduce((sum, s) => sum + s, 0);
  // 64 bytes per page = 32-byte header + 32-byte state bitmap
  const usedBytes = slotsConsumed * ENTRY_SIZE + totalPages * 64;
  const rawPercent = maxEntries > 0
    ? Math.min((slotsConsumed / maxEntries) * 100, 100)
    : 0;
  return {
    totalBytes: targetSizeBytes,
    totalPages,
    usedEntries: logicalEntries,
    maxEntries,
    usedBytes,
    usagePercent: Math.round(rawPercent * 10) / 10, // one decimal place
  };
}
/**
 * Validate partition data against NVS format limits.
 * Checks namespace count and name length, key presence/length, namespace
 * registration, primitive value ranges, string byte length, blob size
 * (legacy BLOB vs V2 BLOB_DATA limits), and duplicate (namespace, key)
 * pairs. Returns every error message found (empty array = valid).
 */
export function validatePartition(partition: NvsPartition): string[] {
  const errors: string[] = [];
  if (partition.namespaces.length > MAX_NAMESPACES) {
    errors.push(`命名空间数量超过上限 ${MAX_NAMESPACES}`);
  }
  // NOTE(review): name/key length checks below count UTF-16 code units,
  // while the SZ value check uses UTF-8 bytes — confirm keys are expected
  // to be ASCII-only, otherwise multi-byte keys may pass here yet overflow
  // the on-flash key field.
  for (const ns of partition.namespaces) {
    if (ns.length === 0) {
      errors.push('命名空间名称不能为空');
    }
    if (ns.length > MAX_KEY_LENGTH) {
      errors.push(`命名空间 "${ns}" 名称超过 ${MAX_KEY_LENGTH} 字符`);
    }
  }
  for (const entry of partition.entries) {
    if (entry.key.length === 0) {
      errors.push(`在命名空间 "${entry.namespace}" 中存在空键名`);
    }
    if (entry.key.length > MAX_KEY_LENGTH) {
      errors.push(`键 "${entry.key}" 名称超过 ${MAX_KEY_LENGTH} 字符`);
    }
    if (!partition.namespaces.includes(entry.namespace)) {
      errors.push(`键 "${entry.key}" 的命名空间 "${entry.namespace}" 未注册`);
    }
    // Validate value ranges for primitive types
    if (isPrimitiveType(entry.type)) {
      if (typeof entry.value === 'number') {
        const v = entry.value;
        switch (entry.type) {
          case NvsType.U8: if (v < 0 || v > 0xFF) errors.push(`"${entry.key}" U8 值超出范围`); break;
          case NvsType.I8: if (v < -128 || v > 127) errors.push(`"${entry.key}" I8 值超出范围`); break;
          case NvsType.U16: if (v < 0 || v > 0xFFFF) errors.push(`"${entry.key}" U16 值超出范围`); break;
          case NvsType.I16: if (v < -32768 || v > 32767) errors.push(`"${entry.key}" I16 值超出范围`); break;
          case NvsType.U32: if (v < 0 || v > 0xFFFFFFFF) errors.push(`"${entry.key}" U32 值超出范围`); break;
          case NvsType.I32: if (v < -2147483648 || v > 2147483647) errors.push(`"${entry.key}" I32 值超出范围`); break;
        }
      } else if (typeof entry.value === 'bigint') {
        // 64-bit values carry bigint payloads; check against full u64/i64 range
        const v = entry.value;
        switch (entry.type) {
          case NvsType.U64:
            if (v < 0n || v > 0xFFFFFFFFFFFFFFFFn) errors.push(`"${entry.key}" U64 值超出范围`);
            break;
          case NvsType.I64:
            if (v < -9223372036854775808n || v > 9223372036854775807n) errors.push(`"${entry.key}" I64 值超出范围`);
            break;
        }
      }
    }
    // Validate string length (UTF-8 bytes, plus an implicit NUL terminator)
    if (entry.type === NvsType.SZ && typeof entry.value === 'string') {
      const byteLen = new TextEncoder().encode(entry.value).length;
      if (byteLen >= MAX_STRING_LENGTH) {
        errors.push(`"${entry.key}" 字符串长度 ${byteLen} 字节超过上限 ${MAX_STRING_LENGTH - 1}`);
      }
    }
    // Validate blob size
    // NvsType.BLOB uses the legacy V1 single-page format regardless of partition version,
    // so it is always capped at MAX_BLOB_SIZE_V1.
    // NvsType.BLOB_DATA uses the V2 chunked format and is capped at MAX_BLOB_SIZE_V2.
    if (entry.type === NvsType.BLOB && entry.value instanceof Uint8Array) {
      if (entry.value.length > MAX_BLOB_SIZE_V1) {
        errors.push(`"${entry.key}" BLOB ${entry.value.length} 字节超过上限 ${MAX_BLOB_SIZE_V1}`);
      }
    } else if (entry.type === NvsType.BLOB_DATA && entry.value instanceof Uint8Array) {
      if (entry.value.length > MAX_BLOB_SIZE_V2) {
        errors.push(`"${entry.key}" BLOB ${entry.value.length} 字节超过 V2 上限 ${MAX_BLOB_SIZE_V2}`);
      }
    }
  }
  // Check for duplicate (namespace, key) pairs
  const seen = new Set<string>();
  for (const entry of partition.entries) {
    const k = `${entry.namespace}::${entry.key}`;
    if (seen.has(k)) {
      errors.push(`重复键: ${entry.namespace}/${entry.key}`);
    }
    seen.add(k);
  }
  return errors;
}
/** Return a partition whose entries are ordered by namespace, then key. */
export function sortEntries(partition: NvsPartition): NvsPartition {
  const ordered = partition.entries
    .slice()
    .sort((a, b) =>
      a.namespace.localeCompare(b.namespace) || a.key.localeCompare(b.key));
  return { ...partition, entries: ordered };
}

137
lib/nvs/types.ts Normal file
View File

@ -0,0 +1,137 @@
/**
 * NVS data type tags, matching the one-byte type field of the ESP-IDF
 * binary format. For the integer types the low nibble is the byte width
 * and the 0x10 bit marks a signed type.
 */
export enum NvsType {
  U8 = 0x01,
  I8 = 0x11,
  U16 = 0x02,
  I16 = 0x12,
  U32 = 0x04,
  I32 = 0x14,
  U64 = 0x08,
  I64 = 0x18,
  SZ = 0x21,        // null-terminated string
  BLOB = 0x41,      // legacy single-page blob (V1 format)
  BLOB_DATA = 0x42, // one chunk of a V2 multi-page blob
  BLOB_IDX = 0x48,  // V2 blob index entry (total size + chunk bookkeeping)
}
/**
 * Page states as written to flash. Values are chosen so each lifecycle
 * transition (EMPTY → ACTIVE → FULL → FREEING) only clears bits, which is
 * the only in-place change NOR flash allows.
 */
export enum PageState {
  EMPTY = 0xFFFFFFFF,   // erased, never written
  ACTIVE = 0xFFFFFFFE,  // currently accepting entries
  FULL = 0xFFFFFFFC,    // no free entry slots remain
  FREEING = 0xFFFFFFFA, // being garbage-collected
  CORRUPT = 0x00000000, // invalid/damaged page
}
/** Entry state in the per-page bitmap (two bits per entry slot). */
export enum EntryState {
  EMPTY = 0x3,   // 0b11 — erased, unused slot
  WRITTEN = 0x2, // 0b10 — slot holds a valid entry
  ERASED = 0x0,  // 0b00 — entry was invalidated
}
/** NVS format version byte, stored in the page header at offset 8. */
export enum NvsVersion {
  V1 = 0xFF, // original format; blobs limited to a single page
  V2 = 0xFE, // adds chunked blobs (BLOB_DATA/BLOB_IDX)
}
/** Human-friendly encoding name used in the CSV format and UI dropdowns. */
export type NvsEncoding =
  | 'u8' | 'i8' | 'u16' | 'i16' | 'u32' | 'i32' | 'u64' | 'i64'
  | 'string' | 'blob' | 'binary';
/** A single NVS key-value record (the user-facing data model). */
export interface NvsEntry {
  // Client-side tracking identifier (see generateEntryId); never serialized
  id: string;
  namespace: string;
  key: string;
  type: NvsType;
  // number for 8/16/32-bit ints, bigint for 64-bit ints,
  // string for SZ, Uint8Array for the blob types
  value: number | bigint | string | Uint8Array;
}
/** The top-level partition data model. */
export interface NvsPartition {
  entries: NvsEntry[];
  // Declaration order; also determines namespace-index assignment order
  namespaces: string[];
  version: NvsVersion;
}
/** Statistics about flash usage (produced by calculateFlashStats). */
export interface NvsFlashStats {
  totalBytes: number;   // target partition size in bytes
  totalPages: number;
  usedEntries: number;  // total entry slots required, before packing waste
  maxEntries: number;   // slot capacity excluding the reserved GC page
  usedBytes: number;    // slots consumed (incl. waste) plus per-page overhead
  usagePercent: number; // 0-100, rounded to one decimal place
}
/** One row of the ESP-IDF CSV format (`key,type,encoding,value`). */
export interface NvsCsvRow {
  key: string;
  type: 'namespace' | 'data' | 'file';
  encoding: NvsEncoding | ''; // empty for namespace rows
  value: string;
}
/**
 * Maps NvsType to the human-friendly encoding name. The mapping is not
 * injective: BLOB_DATA and BLOB_IDX both render as 'binary', so a round
 * trip through ENCODING_TO_TYPE yields BLOB_DATA.
 */
export const TYPE_TO_ENCODING: Record<NvsType, NvsEncoding> = {
  [NvsType.U8]: 'u8',
  [NvsType.I8]: 'i8',
  [NvsType.U16]: 'u16',
  [NvsType.I16]: 'i16',
  [NvsType.U32]: 'u32',
  [NvsType.I32]: 'i32',
  [NvsType.U64]: 'u64',
  [NvsType.I64]: 'i64',
  [NvsType.SZ]: 'string',
  [NvsType.BLOB]: 'blob',
  [NvsType.BLOB_DATA]: 'binary',
  [NvsType.BLOB_IDX]: 'binary',
};
/**
 * Maps encoding name to NvsType. 'blob' keeps the legacy single-page BLOB
 * type, while 'binary' selects the V2 chunked BLOB_DATA representation for
 * new entries.
 */
export const ENCODING_TO_TYPE: Record<NvsEncoding, NvsType> = {
  'u8': NvsType.U8,
  'i8': NvsType.I8,
  'u16': NvsType.U16,
  'i16': NvsType.I16,
  'u32': NvsType.U32,
  'i32': NvsType.I32,
  'u64': NvsType.U64,
  'i64': NvsType.I64,
  'string': NvsType.SZ,
  'blob': NvsType.BLOB,
  'binary': NvsType.BLOB_DATA,
};
/** All encoding options for UI dropdowns, in display order. */
export const ENCODING_OPTIONS: NvsEncoding[] = [
  'u8', 'i8', 'u16', 'i16', 'u32', 'i32', 'u64', 'i64', 'string', 'blob', 'binary',
];
/** True for the fixed-size integer types stored inline in the entry's data field. */
export function isPrimitiveType(type: NvsType): boolean {
  switch (type) {
    case NvsType.U8: case NvsType.I8:
    case NvsType.U16: case NvsType.I16:
    case NvsType.U32: case NvsType.I32:
    case NvsType.U64: case NvsType.I64:
      return true;
    default:
      return false;
  }
}
/** True for the string/blob family of types (variable-length payloads). */
export function isVariableLengthType(type: NvsType): boolean {
  switch (type) {
    case NvsType.SZ:
    case NvsType.BLOB:
    case NvsType.BLOB_DATA:
    case NvsType.BLOB_IDX:
      return true;
    default:
      return false;
  }
}
/** Byte width of a primitive integer type's value; 0 for non-primitive types. */
export function primitiveSize(type: NvsType): number {
  if (type === NvsType.U8 || type === NvsType.I8) return 1;
  if (type === NvsType.U16 || type === NvsType.I16) return 2;
  if (type === NvsType.U32 || type === NvsType.I32) return 4;
  if (type === NvsType.U64 || type === NvsType.I64) return 8;
  return 0;
}