Add per-hex metadata system for scale-invariant classification
Each hex now gets a meta/<q>_<r>.json with stable ID, pixel center coordinates, pixel bounds, labels, notes, and classification status. The pixelCenter acts as a scale-independent anchor: when switching from 10 Meilen/Hex to 5 Meilen/Hex, pixelToAxial(meta.pixelCenter, newSize) maps coarse hexes to fine hexes without re-running classification. Adds: - pipeline/build-hexmeta.ts: creates/updates metadata + exports data/hexmeta-<map-id>.jsonl (committed, survives git clones) - pipeline/auto-classify-ocean.ts: pixel-based ocean auto-detection - pipeline/create-map.ts: one-off DB map entry creation - extract-submaps.ts: writes meta/<q>_<r>.json during extraction - data/hexmeta-1.jsonl: 8844 hex metadata entries for Aventurien map 1 Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
5
.gitignore
vendored
5
.gitignore
vendored
@@ -5,6 +5,9 @@ dist-server/
|
||||
tiles/
|
||||
data/*.db
|
||||
|
||||
# Pipeline artefakte — lokal erzeugte Karten, Tiles, Submaps
|
||||
# Pipeline artefakte — lokal erzeugte Karten, Tiles, Submaps (PNGs + rohe JSONs)
|
||||
pipeline/source/
|
||||
pipeline/submaps/
|
||||
|
||||
# data/ enthält committete Artefakte (hexmeta-*.jsonl), aber keine DB-Dateien
|
||||
data/*.db
|
||||
|
||||
8844
data/hexmeta-1.jsonl
Normal file
8844
data/hexmeta-1.jsonl
Normal file
File diff suppressed because it is too large
Load Diff
@@ -14,6 +14,7 @@
|
||||
"pipeline:tiles": "tsx pipeline/generate-tiles.ts",
|
||||
"pipeline:assemble": "tsx pipeline/assemble-map.ts",
|
||||
"pipeline:extract": "tsx pipeline/extract-submaps.ts",
|
||||
"pipeline:hexmeta": "tsx pipeline/build-hexmeta.ts",
|
||||
"pipeline:import": "tsx pipeline/import-from-json.ts"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
136
pipeline/auto-classify-ocean.ts
Normal file
136
pipeline/auto-classify-ocean.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
/**
|
||||
* Auto-classify ocean and off-map hexes from the source image.
|
||||
* Writes to classifications.json (appends / merges with existing entries).
|
||||
*
|
||||
* Ocean detection heuristic:
|
||||
 * - Pixel is blue-dominant (Aventurien ocean ≈ #2a5574 and similar shades)
|
||||
* - OR pixel is very light (white map border / outside image)
|
||||
* - OR hex center is outside image bounds
|
||||
*
|
||||
* Everything else is left for manual (vision) classification.
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx pipeline/auto-classify-ocean.ts <map-id>
|
||||
*/
|
||||
|
||||
import sharp from 'sharp';
|
||||
import { readFileSync, writeFileSync, existsSync } from 'fs';
|
||||
import { join, resolve, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { axialToPixel } from '../core/coords.js';
|
||||
import type { PixelCoord } from '../core/types.js';
|
||||
|
||||
const ROOT = resolve(dirname(fileURLToPath(import.meta.url)), '..');
|
||||
|
||||
function isOcean(r: number, g: number, b: number): boolean {
|
||||
// Off-map / white border
|
||||
if (r > 200 && g > 200 && b > 200) return true;
|
||||
// Black map border / decoration lines
|
||||
if (r < 50 && g < 50 && b < 50) return true;
|
||||
// Very dark (near-black) border elements
|
||||
if (Math.max(r, g, b) < 60) return true;
|
||||
// Blue-dominant ocean (Aventurien sea ≈ rgb(42,85,116) and similar)
|
||||
if (b > 80 && b > r * 1.3 && b >= g) return true;
|
||||
// Bright cyan/turquoise ocean variants with text overlays
|
||||
if (b > 100 && g > 100 && r < 100) return true;
|
||||
// Dark border padding (rgb(42,85,116) added by tile generator)
|
||||
if (b > 90 && r < 70 && g < 110) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const mapId = parseInt(process.argv[2], 10);
|
||||
if (isNaN(mapId)) {
|
||||
console.error('Usage: npx tsx pipeline/auto-classify-ocean.ts <map-id>');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const submapDir = join(ROOT, 'pipeline', 'submaps', String(mapId));
|
||||
const manifestPath = join(submapDir, 'manifest.json');
|
||||
const classPath = join(submapDir, 'classifications.json');
|
||||
const imagePath = join(ROOT, 'pipeline', 'source', 'aventurien-8000x12000.jpg');
|
||||
|
||||
if (!existsSync(manifestPath)) {
|
||||
console.error('No manifest found. Run extract-submaps first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, 'utf-8'));
|
||||
const { hexes, hexSize, originX, originY, imageWidth, imageHeight } = manifest;
|
||||
const origin: PixelCoord = { x: originX, y: originY };
|
||||
|
||||
// Load existing classifications
|
||||
const existing = new Map<string, object>();
|
||||
if (existsSync(classPath)) {
|
||||
const arr: Array<{ q: number; r: number }> = JSON.parse(readFileSync(classPath, 'utf-8'));
|
||||
for (const entry of arr) existing.set(`${entry.q},${entry.r}`, entry);
|
||||
}
|
||||
|
||||
console.log(`Manifest: ${hexes.length} hexes. Already classified: ${existing.size}`);
|
||||
console.log(`Loading source image...`);
|
||||
|
||||
// Load full image as raw RGB buffer for fast pixel sampling
|
||||
const { data, info } = await sharp(imagePath)
|
||||
.raw()
|
||||
.toBuffer({ resolveWithObject: true });
|
||||
|
||||
function samplePixel(px: number, py: number): [number, number, number] | null {
|
||||
const x = Math.round(px);
|
||||
const y = Math.round(py);
|
||||
if (x < 0 || x >= info.width || y < 0 || y >= info.height) return null;
|
||||
const idx = (y * info.width + x) * info.channels;
|
||||
return [data[idx], data[idx + 1], data[idx + 2]];
|
||||
}
|
||||
|
||||
let autoOcean = 0;
|
||||
let skipped = 0;
|
||||
let needsManual = 0;
|
||||
|
||||
const results: object[] = [...existing.values()];
|
||||
|
||||
for (const { q, r } of hexes) {
|
||||
const key = `${q},${r}`;
|
||||
if (existing.has(key)) { skipped++; continue; }
|
||||
|
||||
const px = axialToPixel({ q, r }, hexSize, origin);
|
||||
|
||||
// Sample center + 6 inner points at 50% radius for robustness
|
||||
const SQRT3 = Math.sqrt(3);
|
||||
const innerR = hexSize * 0.5;
|
||||
const samplePoints = [
|
||||
{ x: px.x, y: px.y },
|
||||
...Array.from({ length: 6 }, (_, i) => ({
|
||||
x: px.x + innerR * Math.cos(Math.PI / 3 * i),
|
||||
y: px.y + innerR * Math.sin(Math.PI / 3 * i),
|
||||
})),
|
||||
];
|
||||
|
||||
let oceanVotes = 0;
|
||||
let totalSampled = 0;
|
||||
for (const pt of samplePoints) {
|
||||
const pixel = samplePixel(pt.x, pt.y);
|
||||
if (pixel === null) { oceanVotes++; totalSampled++; continue; }
|
||||
totalSampled++;
|
||||
if (isOcean(pixel[0], pixel[1], pixel[2])) oceanVotes++;
|
||||
}
|
||||
|
||||
// Ocean if majority of sampled points look like ocean
|
||||
const isOceanHex = oceanVotes >= Math.ceil(totalSampled * 0.6);
|
||||
|
||||
if (isOceanHex) {
|
||||
results.push({ q, r, base: 'ocean', features: [] });
|
||||
autoOcean++;
|
||||
} else {
|
||||
needsManual++;
|
||||
}
|
||||
}
|
||||
|
||||
writeFileSync(classPath, JSON.stringify(results, null, 0) + '\n');
|
||||
|
||||
console.log(`\nAuto-classified: ${autoOcean} ocean hexes`);
|
||||
console.log(`Skipped (already done): ${skipped}`);
|
||||
console.log(`Remaining for manual classification: ${needsManual}`);
|
||||
console.log(`\nclassifications.json: ${results.length} entries total`);
|
||||
}
|
||||
|
||||
main().catch(err => { console.error(err); process.exit(1); });
|
||||
174
pipeline/build-hexmeta.ts
Normal file
174
pipeline/build-hexmeta.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
/**
|
||||
* Build or update hex metadata files.
|
||||
*
|
||||
* Creates pipeline/submaps/<map-id>/meta/<q>_<r>.json per hex and
|
||||
* a consolidated data/hexmeta-<map-id>.jsonl (committed to git).
|
||||
*
|
||||
* Metadata per hex:
|
||||
* id — stable sequential integer, assigned once from manifest order
|
||||
* q, r — axial coordinates at the current scale
|
||||
* pixelCenter — center in source image pixels (scale-independent anchor)
|
||||
* pixelBounds — approximate pixel coverage in source image
|
||||
* hexSizePx — hex size at time of extraction
|
||||
* meilenPerHex — scale (e.g. 10)
|
||||
* mapId — DB map id
|
||||
* labels — text labels visible in the submap (manually noted or OCR)
|
||||
* notes — free-form observations
|
||||
* classification — {base, features, method, classifiedAt} or null
|
||||
*
|
||||
* Cross-scale lookup:
|
||||
* To find which coarse hex (hexSize=40) a fine hex (hexSize=20) falls in:
|
||||
* pixelToAxial(meta.pixelCenter, 40, origin)
|
||||
* The pixelCenter is always stored at the ORIGINAL source image resolution.
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx pipeline/build-hexmeta.ts <map-id>
|
||||
* npx tsx pipeline/build-hexmeta.ts <map-id> --update-from-classifications
|
||||
*/
|
||||
|
||||
import { readFileSync, writeFileSync, mkdirSync, existsSync } from 'fs';
|
||||
import { join, resolve, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { axialToPixel } from '../core/coords.js';
|
||||
import type { PixelCoord } from '../core/types.js';
|
||||
|
||||
const ROOT = resolve(dirname(fileURLToPath(import.meta.url)), '..');
|
||||
const PIXELS_PER_MEILE = 8;
|
||||
|
||||
/**
 * Per-hex metadata record, stored as meta/<q>_<r>.json and consolidated
 * into data/hexmeta-<map-id>.jsonl (one object per line).
 */
export interface HexMeta {
  // Stable sequential integer id, assigned once from manifest order.
  id: number;
  // Axial coordinates at the current scale.
  q: number;
  r: number;
  // DB map id this hex belongs to.
  mapId: number;
  // Hex size (in pixels) at time of extraction.
  hexSizePx: number;
  // Scale, e.g. 10 (Meilen per hex).
  meilenPerHex: number;
  // Center in ORIGINAL source-image pixels — the scale-independent anchor
  // used for cross-scale lookup via pixelToAxial (see file header).
  pixelCenter: PixelCoord;
  // Approximate pixel coverage in the source image, clamped to its bounds.
  pixelBounds: { left: number; top: number; right: number; bottom: number };
  // Text labels visible in the submap (manually noted or OCR).
  labels: string[];
  // Free-form observations.
  notes: string;
  // Terrain classification, or null if not yet classified.
  classification: {
    base: string;
    features: Array<{ terrainId: string; edgeMask: number }>;
    // 'auto' = pixel heuristic; 'manual'/'manual-session' = human-assigned.
    method: 'auto' | 'manual' | 'manual-session';
    // ISO 8601 timestamp of when the classification was recorded.
    classifiedAt: string;
  } | null;
}
|
||||
|
||||
function computeBounds(
|
||||
cx: number, cy: number,
|
||||
hexSize: number,
|
||||
imageWidth: number,
|
||||
imageHeight: number,
|
||||
cropFactor = 2.8,
|
||||
): { left: number; top: number; right: number; bottom: number } {
|
||||
const r = Math.ceil(hexSize * cropFactor);
|
||||
return {
|
||||
left: Math.max(0, Math.round(cx - r)),
|
||||
top: Math.max(0, Math.round(cy - r)),
|
||||
right: Math.min(imageWidth, Math.round(cx + r)),
|
||||
bottom: Math.min(imageHeight, Math.round(cy + r)),
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const mapId = parseInt(process.argv[2], 10);
|
||||
if (isNaN(mapId)) {
|
||||
console.error('Usage: npx tsx pipeline/build-hexmeta.ts <map-id>');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const updateFromClassifications = process.argv.includes('--update-from-classifications');
|
||||
const submapDir = join(ROOT, 'pipeline', 'submaps', String(mapId));
|
||||
const metaDir = join(submapDir, 'meta');
|
||||
const manifestPath = join(submapDir, 'manifest.json');
|
||||
const classPath = join(submapDir, 'classifications.json');
|
||||
const outJsonl = join(ROOT, 'data', `hexmeta-${mapId}.jsonl`);
|
||||
|
||||
if (!existsSync(manifestPath)) {
|
||||
console.error('No manifest found. Run extract-submaps first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
mkdirSync(metaDir, { recursive: true });
|
||||
mkdirSync(join(ROOT, 'data'), { recursive: true });
|
||||
|
||||
const manifest = JSON.parse(readFileSync(manifestPath, 'utf-8'));
|
||||
const { hexes, hexSize, originX, originY, imageWidth, imageHeight, meilenPerHex } = manifest;
|
||||
const origin: PixelCoord = { x: originX, y: originY };
|
||||
|
||||
// Load existing classifications for merge
|
||||
const classMap = new Map<string, { base: string; features: any[] }>();
|
||||
if (existsSync(classPath)) {
|
||||
for (const c of JSON.parse(readFileSync(classPath, 'utf-8'))) {
|
||||
classMap.set(`${c.q},${c.r}`, c);
|
||||
}
|
||||
}
|
||||
|
||||
let created = 0;
|
||||
let updated = 0;
|
||||
const allMeta: HexMeta[] = [];
|
||||
|
||||
for (let i = 0; i < hexes.length; i++) {
|
||||
const { q, r } = hexes[i];
|
||||
const metaPath = join(metaDir, `${q}_${r}.json`);
|
||||
const key = `${q},${r}`;
|
||||
|
||||
const pixelCenter = axialToPixel({ q, r }, hexSize, origin);
|
||||
const pixelBounds = computeBounds(pixelCenter.x, pixelCenter.y, hexSize, imageWidth, imageHeight);
|
||||
|
||||
let meta: HexMeta;
|
||||
|
||||
if (existsSync(metaPath)) {
|
||||
// Load and potentially update existing
|
||||
meta = JSON.parse(readFileSync(metaPath, 'utf-8'));
|
||||
let changed = false;
|
||||
|
||||
if (updateFromClassifications && classMap.has(key) && !meta.classification) {
|
||||
const c = classMap.get(key)!;
|
||||
meta.classification = {
|
||||
base: c.base,
|
||||
features: c.features ?? [],
|
||||
method: 'auto',
|
||||
classifiedAt: new Date().toISOString(),
|
||||
};
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (changed) { writeFileSync(metaPath, JSON.stringify(meta, null, 2)); updated++; }
|
||||
} else {
|
||||
// Create new
|
||||
const existingClass = classMap.get(key);
|
||||
meta = {
|
||||
id: i + 1,
|
||||
q, r,
|
||||
mapId,
|
||||
hexSizePx: hexSize,
|
||||
meilenPerHex,
|
||||
pixelCenter: { x: Math.round(pixelCenter.x), y: Math.round(pixelCenter.y) },
|
||||
pixelBounds,
|
||||
labels: [],
|
||||
notes: '',
|
||||
classification: existingClass
|
||||
? {
|
||||
base: existingClass.base,
|
||||
features: existingClass.features ?? [],
|
||||
method: 'auto',
|
||||
classifiedAt: new Date().toISOString(),
|
||||
}
|
||||
: null,
|
||||
};
|
||||
writeFileSync(metaPath, JSON.stringify(meta, null, 2));
|
||||
created++;
|
||||
}
|
||||
|
||||
allMeta.push(meta);
|
||||
}
|
||||
|
||||
// Write consolidated JSONL (one JSON object per line)
|
||||
writeFileSync(outJsonl, allMeta.map(m => JSON.stringify(m)).join('\n') + '\n');
|
||||
|
||||
console.log(`Meta files: ${created} created, ${updated} updated`);
|
||||
console.log(`Consolidated: ${outJsonl} (${allMeta.length} entries)`);
|
||||
}
|
||||
|
||||
main().catch(err => { console.error(err); process.exit(1); });
|
||||
13
pipeline/create-map.ts
Normal file
13
pipeline/create-map.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { initDb, getDb, saveDb } from '../server/db.js';
|
||||
|
||||
await initDb();
|
||||
const db = getDb();
|
||||
db.run(
|
||||
`INSERT INTO hex_maps (name, image_width, image_height, tile_url, min_zoom, max_zoom, hex_size, origin_x, origin_y)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
['Aventurien 10M/Hex', 8000, 12000, '/tiles/{z}/{x}/{y}.jpg', 0, 6, 40, 0, 0],
|
||||
);
|
||||
const rows = db.exec('SELECT last_insert_rowid()');
|
||||
const id = rows[0].values[0][0];
|
||||
saveDb();
|
||||
console.log('Map ID:', id);
|
||||
@@ -23,6 +23,7 @@ import { initDb, getDb } from '../server/db.js';
|
||||
import { axialToPixel, hexVertices } from '../core/coords.js';
|
||||
import { gridBoundsForImage } from '../core/hex-grid.js';
|
||||
import { HexEdge, EDGE_DIRECTIONS, ALL_EDGES, type AxialCoord, type PixelCoord } from '../core/types.js';
|
||||
import type { HexMeta } from './build-hexmeta.js';
|
||||
|
||||
const ROOT = resolve(dirname(fileURLToPath(import.meta.url)), '..');
|
||||
|
||||
@@ -183,7 +184,7 @@ async function main() {
|
||||
const outDir = join(ROOT, 'pipeline', 'submaps', String(mapId));
|
||||
mkdirSync(outDir, { recursive: true });
|
||||
|
||||
// Write manifest
|
||||
// Write manifest (with stable IDs, starting at 1)
|
||||
const manifest = {
|
||||
mapId,
|
||||
imageWidth: image_width,
|
||||
@@ -192,17 +193,47 @@ async function main() {
|
||||
originX: origin_x,
|
||||
originY: origin_y,
|
||||
meilenPerHex: hexesPerMeile,
|
||||
hexes: sorted.map(c => ({ q: c.q, r: c.r })),
|
||||
hexes: sorted.map((c, i) => ({ id: i + 1, q: c.q, r: c.r })),
|
||||
};
|
||||
writeFileSync(join(outDir, 'manifest.json'), JSON.stringify(manifest, null, 2));
|
||||
console.log(`Manifest written: ${sorted.length} hexes`);
|
||||
|
||||
// Create meta directory for per-hex attribute files
|
||||
const metaDir = join(outDir, 'meta');
|
||||
mkdirSync(metaDir, { recursive: true });
|
||||
|
||||
let done = 0;
|
||||
for (const coord of sorted) {
|
||||
for (let i = 0; i < sorted.length; i++) {
|
||||
const coord = sorted[i];
|
||||
const filename = `${coord.q}_${coord.r}.png`;
|
||||
const outPath = join(outDir, filename);
|
||||
const metaPath = join(metaDir, `${coord.q}_${coord.r}.json`);
|
||||
|
||||
// Skip if already extracted (resumable)
|
||||
// Create meta file if not present (never overwrite existing — preserves labels/notes)
|
||||
if (!existsSync(metaPath)) {
|
||||
const px = axialToPixel(coord, hex_size, origin);
|
||||
const r = Math.ceil(hex_size * CROP_RADIUS_FACTOR);
|
||||
const meta: HexMeta = {
|
||||
id: i + 1,
|
||||
q: coord.q, r: coord.r,
|
||||
mapId,
|
||||
hexSizePx: hex_size,
|
||||
meilenPerHex: hexesPerMeile,
|
||||
pixelCenter: { x: Math.round(px.x), y: Math.round(px.y) },
|
||||
pixelBounds: {
|
||||
left: Math.max(0, Math.round(px.x - r)),
|
||||
top: Math.max(0, Math.round(px.y - r)),
|
||||
right: Math.min(image_width, Math.round(px.x + r)),
|
||||
bottom: Math.min(image_height, Math.round(px.y + r)),
|
||||
},
|
||||
labels: [],
|
||||
notes: '',
|
||||
classification: null,
|
||||
};
|
||||
writeFileSync(metaPath, JSON.stringify(meta, null, 2));
|
||||
}
|
||||
|
||||
// Skip PNG if already extracted (resumable)
|
||||
if (existsSync(outPath)) {
|
||||
done++;
|
||||
continue;
|
||||
|
||||
Reference in New Issue
Block a user