Phase 3: Express backend, SQLite persistence, auto-save

- server/db.ts: sql.js with migration system (hex_maps, hexes, hex_features)
- server/routes/maps.ts: CRUD for hex maps
- server/routes/hexes.ts: Bulk hex upsert, region load, sparse storage
- server/index.ts: Express 5, CORS, tile serving, SPA fallback
- src/data/api-client.ts: Frontend HTTP client for all API endpoints
- src/main.ts: Auto-save with 1s debounce, load map state on startup
- Port 3002 (Kiepenkerl uses 3001)
- Graceful fallback when API unavailable (works without server too)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Axel Meyer
2026-04-07 10:45:37 +00:00
parent 0e2903b789
commit 367ba8af07
8 changed files with 523 additions and 6 deletions

141
server/routes/hexes.ts Normal file
View File

@@ -0,0 +1,141 @@
import { Router } from 'express';
import { getDb, saveDb } from '../db.js';
const router = Router();

/**
 * GET /:mapId/hexes — bulk-load every stored hex for a map.
 *
 * Storage is sparse: only hexes that differ from the default terrain are
 * persisted, so the response is the full list of stored hexes, each with
 * its feature overlays. Responds with `[]` when the map has no stored hexes
 * (or the map id is unknown — the two cases are indistinguishable here).
 */
router.get('/:mapId/hexes', (req, res) => {
  const db = getDb();
  const mapId = req.params.mapId;

  const hexRows = db.exec(
    'SELECT id, q, r, base_terrain FROM hexes WHERE map_id = ?',
    [mapId],
  );
  // sql.js returns [] (no result object) when the query matched nothing.
  if (hexRows.length === 0) {
    res.json([]);
    return;
  }

  // Load all features for the map in a single joined query instead of one
  // query per hex — sql.js has no convenient parameterized IN (...) for
  // arbitrarily large id sets.
  const featureMap = new Map<number, Array<{ terrainId: unknown; edgeMask: unknown }>>();
  const featureRows = db.exec(
    `SELECT hf.hex_id, hf.terrain_id, hf.edge_mask
     FROM hex_features hf
     JOIN hexes h ON hf.hex_id = h.id
     WHERE h.map_id = ?`,
    [mapId],
  );
  if (featureRows.length > 0) {
    for (const row of featureRows[0].values) {
      const hexId = row[0] as number;
      let features = featureMap.get(hexId);
      if (!features) {
        features = [];
        featureMap.set(hexId, features);
      }
      features.push({
        terrainId: row[1],
        edgeMask: row[2],
      });
    }
  }

  // Stitch each hex row together with its features into the wire format.
  const hexes = hexRows[0].values.map(row => {
    const hexId = row[0] as number;
    return {
      q: row[1],
      r: row[2],
      base: row[3],
      features: featureMap.get(hexId) ?? [],
    };
  });
  res.json(hexes);
});
/**
 * PUT /:mapId/hexes — bulk-upsert an array of hexes.
 *
 * Each entry replaces the stored hex at (q, r) — base terrain is upserted
 * and the feature set is fully replaced (features with edgeMask 0 are
 * dropped rather than stored). All writes happen in one transaction;
 * on any failure the whole batch is rolled back and a 500 is returned.
 * Responds 400 for a non-array body or malformed entries.
 */
router.put('/:mapId/hexes', (req, res) => {
  const db = getDb();
  const mapId = req.params.mapId;
  const hexes: Array<{
    q: number;
    r: number;
    base: string;
    features: Array<{ terrainId: string; edgeMask: number }>;
  }> = req.body;
  if (!Array.isArray(hexes)) {
    res.status(400).json({ error: 'Expected array of hex updates' });
    return;
  }
  // Validate up front so we never begin a transaction we know must fail,
  // and never write malformed coordinates/terrain into the DB (SQLite's
  // dynamic typing would happily store them otherwise).
  for (const hex of hexes) {
    if (
      typeof hex?.q !== 'number' ||
      typeof hex?.r !== 'number' ||
      typeof hex?.base !== 'string' ||
      !Array.isArray(hex?.features)
    ) {
      res.status(400).json({ error: 'Each hex requires numeric q/r, string base, and a features array' });
      return;
    }
  }
  db.run('BEGIN TRANSACTION');
  try {
    for (const hex of hexes) {
      // Upsert the hex's base terrain.
      db.run(
        `INSERT INTO hexes (map_id, q, r, base_terrain, updated_at)
       VALUES (?, ?, ?, ?, datetime('now'))
       ON CONFLICT(map_id, q, r)
       DO UPDATE SET base_terrain = excluded.base_terrain, updated_at = datetime('now')`,
        [mapId, hex.q, hex.r, hex.base],
      );
      // Re-select the id: last_insert_rowid() is unreliable on the
      // DO UPDATE path of an upsert, so a lookup is the safe way.
      const idRows = db.exec(
        'SELECT id FROM hexes WHERE map_id = ? AND q = ? AND r = ?',
        [mapId, hex.q, hex.r],
      );
      const hexId = idRows[0].values[0][0] as number;
      // Replace the feature set wholesale (delete + re-insert).
      db.run('DELETE FROM hex_features WHERE hex_id = ?', [hexId]);
      for (const feature of hex.features) {
        if (feature.edgeMask === 0) continue; // empty mask = no feature
        db.run(
          'INSERT INTO hex_features (hex_id, terrain_id, edge_mask) VALUES (?, ?, ?)',
          [hexId, feature.terrainId, feature.edgeMask],
        );
      }
    }
    // Bump the map's timestamp so clients can detect staleness.
    db.run("UPDATE hex_maps SET updated_at = datetime('now') WHERE id = ?", [mapId]);
    db.run('COMMIT');
  } catch (err) {
    db.run('ROLLBACK');
    res.status(500).json({ error: String(err) });
    return;
  }
  // Persist outside the try: if saveDb() throws after COMMIT, the old code's
  // catch would issue ROLLBACK on a finished transaction, which itself
  // throws and masks the real error.
  saveDb();
  res.json({ ok: true, count: hexes.length });
});
/**
 * DELETE /:mapId/hexes/:q/:r — remove a stored hex so it reverts to the
 * default terrain. Deletes the hex's feature rows first, then the hex
 * itself. Deleting a hex that was never stored is a no-op; the response
 * is `{ ok: true }` either way (idempotent).
 */
router.delete('/:mapId/hexes/:q/:r', (req, res) => {
  const db = getDb();
  const { mapId, q, r } = req.params;
  const found = db.exec(
    'SELECT id FROM hexes WHERE map_id = ? AND q = ? AND r = ?',
    [mapId, q, r],
  );
  // db.exec yields [] when nothing matched; optional chaining collapses
  // the "no result set / no rows" cases into a single undefined check.
  const hexId = found[0]?.values[0]?.[0];
  if (hexId != null) {
    db.run('DELETE FROM hex_features WHERE hex_id = ?', [hexId]);
    db.run('DELETE FROM hexes WHERE id = ?', [hexId]);
    saveDb();
  }
  res.json({ ok: true });
});

export default router;