Add per-hex metadata system for scale-invariant classification
Each hex now gets a meta/<q>_<r>.json with stable ID, pixel center coordinates, pixel bounds, labels, notes, and classification status. The pixelCenter acts as a scale-independent anchor: when switching from 10 Meilen/Hex to 5 Meilen/Hex, pixelToAxial(meta.pixelCenter, newSize) maps coarse hexes to fine hexes without re-running classification. Adds: - pipeline/build-hexmeta.ts: creates/updates metadata + exports data/hexmeta-<map-id>.jsonl (committed, survives git clones) - pipeline/auto-classify-ocean.ts: pixel-based ocean auto-detection - pipeline/create-map.ts: one-off DB map entry creation - extract-submaps.ts: writes meta/<q>_<r>.json during extraction - data/hexmeta-1.jsonl: 8844 hex metadata entries for Aventurien map 1 Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
174
pipeline/build-hexmeta.ts
Normal file
174
pipeline/build-hexmeta.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
/**
|
||||
* Build or update hex metadata files.
|
||||
*
|
||||
* Creates pipeline/submaps/<map-id>/meta/<q>_<r>.json per hex and
|
||||
* a consolidated data/hexmeta-<map-id>.jsonl (committed to git).
|
||||
*
|
||||
* Metadata per hex:
|
||||
* id — stable sequential integer, assigned once from manifest order
|
||||
* q, r — axial coordinates at the current scale
|
||||
* pixelCenter — center in source image pixels (scale-independent anchor)
|
||||
* pixelBounds — approximate pixel coverage in source image
|
||||
* hexSizePx — hex size at time of extraction
|
||||
* meilenPerHex — scale (e.g. 10)
|
||||
* mapId — DB map id
|
||||
* labels — text labels visible in the submap (manually noted or OCR)
|
||||
* notes — free-form observations
|
||||
* classification — {base, features, method, classifiedAt} or null
|
||||
*
|
||||
* Cross-scale lookup:
|
||||
* To find which coarse hex (hexSize=40) a fine hex (hexSize=20) falls in:
|
||||
* pixelToAxial(meta.pixelCenter, 40, origin)
|
||||
* The pixelCenter is always stored at the ORIGINAL source image resolution.
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx pipeline/build-hexmeta.ts <map-id>
|
||||
* npx tsx pipeline/build-hexmeta.ts <map-id> --update-from-classifications
|
||||
*/
|
||||
|
||||
import { readFileSync, writeFileSync, mkdirSync, existsSync } from 'fs';
|
||||
import { join, resolve, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { axialToPixel } from '../core/coords.js';
|
||||
import type { PixelCoord } from '../core/types.js';
|
||||
|
||||
// Repository root (one level above pipeline/), derived from this module's URL.
const ROOT = resolve(dirname(fileURLToPath(import.meta.url)), '..');

// NOTE(review): not referenced in the visible code — presumably the source-map
// scale (8 px per Meile); confirm against extract-submaps before removing.
const PIXELS_PER_MEILE = 8;

/**
 * Per-hex metadata record.
 *
 * Written to pipeline/submaps/<map-id>/meta/<q>_<r>.json and, one record per
 * line, to the consolidated data/hexmeta-<map-id>.jsonl.
 */
export interface HexMeta {
  /** Stable sequential id, assigned once from manifest order (1-based). */
  id: number;
  /** Axial q coordinate at the current scale. */
  q: number;
  /** Axial r coordinate at the current scale. */
  r: number;
  /** Database map id this hex belongs to. */
  mapId: number;
  /** Hex size in pixels at time of extraction. */
  hexSizePx: number;
  /** Map scale in Meilen per hex (e.g. 10). */
  meilenPerHex: number;
  /** Hex center in ORIGINAL source-image pixels — the scale-independent anchor. */
  pixelCenter: PixelCoord;
  /** Approximate pixel coverage in the source image, clamped to its edges. */
  pixelBounds: { left: number; top: number; right: number; bottom: number };
  /** Text labels visible in the submap (manually noted or OCR). */
  labels: string[];
  /** Free-form observations. */
  notes: string;
  /** Terrain classification, or null while unclassified. */
  classification: {
    base: string;
    features: Array<{ terrainId: string; edgeMask: number }>;
    /** How the classification was produced. */
    method: 'auto' | 'manual' | 'manual-session';
    /** ISO-8601 timestamp of when the classification was recorded. */
    classifiedAt: string;
  } | null;
}
|
||||
|
||||
function computeBounds(
|
||||
cx: number, cy: number,
|
||||
hexSize: number,
|
||||
imageWidth: number,
|
||||
imageHeight: number,
|
||||
cropFactor = 2.8,
|
||||
): { left: number; top: number; right: number; bottom: number } {
|
||||
const r = Math.ceil(hexSize * cropFactor);
|
||||
return {
|
||||
left: Math.max(0, Math.round(cx - r)),
|
||||
top: Math.max(0, Math.round(cy - r)),
|
||||
right: Math.min(imageWidth, Math.round(cx + r)),
|
||||
bottom: Math.min(imageHeight, Math.round(cy + r)),
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const mapId = parseInt(process.argv[2], 10);
|
||||
if (isNaN(mapId)) {
|
||||
console.error('Usage: npx tsx pipeline/build-hexmeta.ts <map-id>');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const updateFromClassifications = process.argv.includes('--update-from-classifications');
|
||||
const submapDir = join(ROOT, 'pipeline', 'submaps', String(mapId));
|
||||
const metaDir = join(submapDir, 'meta');
|
||||
const manifestPath = join(submapDir, 'manifest.json');
|
||||
const classPath = join(submapDir, 'classifications.json');
|
||||
const outJsonl = join(ROOT, 'data', `hexmeta-${mapId}.jsonl`);
|
||||
|
||||
if (!existsSync(manifestPath)) {
|
||||
console.error('No manifest found. Run extract-submaps first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
mkdirSync(metaDir, { recursive: true });
|
||||
mkdirSync(join(ROOT, 'data'), { recursive: true });
|
||||
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, 'utf-8'));
|
||||
const { hexes, hexSize, originX, originY, imageWidth, imageHeight, meilenPerHex } = manifest;
|
||||
const origin: PixelCoord = { x: originX, y: originY };
|
||||
|
||||
// Load existing classifications for merge
|
||||
const classMap = new Map<string, { base: string; features: any[] }>();
|
||||
if (existsSync(classPath)) {
|
||||
for (const c of JSON.parse(readFileSync(classPath, 'utf-8'))) {
|
||||
classMap.set(`${c.q},${c.r}`, c);
|
||||
}
|
||||
}
|
||||
|
||||
let created = 0;
|
||||
let updated = 0;
|
||||
const allMeta: HexMeta[] = [];
|
||||
|
||||
for (let i = 0; i < hexes.length; i++) {
|
||||
const { q, r } = hexes[i];
|
||||
const metaPath = join(metaDir, `${q}_${r}.json`);
|
||||
const key = `${q},${r}`;
|
||||
|
||||
const pixelCenter = axialToPixel({ q, r }, hexSize, origin);
|
||||
const pixelBounds = computeBounds(pixelCenter.x, pixelCenter.y, hexSize, imageWidth, imageHeight);
|
||||
|
||||
let meta: HexMeta;
|
||||
|
||||
if (existsSync(metaPath)) {
|
||||
// Load and potentially update existing
|
||||
meta = JSON.parse(readFileSync(metaPath, 'utf-8'));
|
||||
let changed = false;
|
||||
|
||||
if (updateFromClassifications && classMap.has(key) && !meta.classification) {
|
||||
const c = classMap.get(key)!;
|
||||
meta.classification = {
|
||||
base: c.base,
|
||||
features: c.features ?? [],
|
||||
method: 'auto',
|
||||
classifiedAt: new Date().toISOString(),
|
||||
};
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (changed) { writeFileSync(metaPath, JSON.stringify(meta, null, 2)); updated++; }
|
||||
} else {
|
||||
// Create new
|
||||
const existingClass = classMap.get(key);
|
||||
meta = {
|
||||
id: i + 1,
|
||||
q, r,
|
||||
mapId,
|
||||
hexSizePx: hexSize,
|
||||
meilenPerHex,
|
||||
pixelCenter: { x: Math.round(pixelCenter.x), y: Math.round(pixelCenter.y) },
|
||||
pixelBounds,
|
||||
labels: [],
|
||||
notes: '',
|
||||
classification: existingClass
|
||||
? {
|
||||
base: existingClass.base,
|
||||
features: existingClass.features ?? [],
|
||||
method: 'auto',
|
||||
classifiedAt: new Date().toISOString(),
|
||||
}
|
||||
: null,
|
||||
};
|
||||
writeFileSync(metaPath, JSON.stringify(meta, null, 2));
|
||||
created++;
|
||||
}
|
||||
|
||||
allMeta.push(meta);
|
||||
}
|
||||
|
||||
// Write consolidated JSONL (one JSON object per line)
|
||||
writeFileSync(outJsonl, allMeta.map(m => JSON.stringify(m)).join('\n') + '\n');
|
||||
|
||||
console.log(`Meta files: ${created} created, ${updated} updated`);
|
||||
console.log(`Consolidated: ${outJsonl} (${allMeta.length} entries)`);
|
||||
}
|
||||
|
||||
main().catch(err => { console.error(err); process.exit(1); });
|
||||
Reference in New Issue
Block a user