This commit is contained in:
Stan
2026-04-19 21:14:16 +02:00
parent 0c74a75126
commit 28d167f11f
42 changed files with 5681 additions and 55 deletions
+54 -7
View File
@@ -1,29 +1,76 @@
import cors from 'cors';
import express from 'express';
import { fileURLToPath } from 'node:url';
import path from 'node:path';
import { errorHandler, notFoundHandler } from './middleware/errorHandler.js';
import configRoutes from './routes/configRoutes.js';
import healthRoutes from './routes/healthRoutes.js';
import lookupRoutes from './routes/lookupRoutes.js';
import reportRoutes from './routes/reportRoutes.js';
import templateRoutes from './routes/templateRoutes.js';
const app = express();

/*
 * The application serves two concerns from the same Express process:
 * 1. a versioned REST API (v1) used by the proof-of-concept frontend,
 * 2. static frontend assets for the chooser portal and the app shell itself.
 *
 * All API endpoints live under /api/v1/ so the contract can evolve without
 * breaking older clients. Keeping both concerns in one process keeps the PoC
 * easy to run in Docker and avoids introducing an additional frontend dev
 * server before the product shape is stable.
 */
const publicDir = fileURLToPath(new URL('../public', import.meta.url));
const userPagePath = path.join(publicDir, 'user.html');
const adminPagePath = path.join(publicDir, 'admin.html');
const portalPath = path.join(publicDir, 'portal.html');

app.use(cors());
app.use(express.json({ limit: '10mb' }));

// Machine-readable API descriptor for the v1 root.
app.get('/api/v1', (_req, res) => {
  res.json({
    service: 'check-list-poc-api',
    version: '0.2.0',
    description: 'Versioned PoC API for template, configuration, and report management.'
  });
});

/*
 * All API routes are grouped under /api/v1/. The version prefix ensures future
 * breaking changes can be introduced on /api/v2/ without disrupting existing
 * frontend deployments that still reference v1 contract shapes.
 *
 * Fix: this section previously contained leftover pre-refactor lines — an old
 * unversioned set of app.use('/api/...') mounts and a second app.get('/')
 * registration — which would have registered duplicate routes and an obsolete
 * metadata handler. Only the v1 mounts and a single root route remain.
 */
app.use('/api/v1/health', healthRoutes);
app.use('/api/v1/templates', templateRoutes);
app.use('/api/v1/lookups', lookupRoutes);
app.use('/api/v1/config', configRoutes);
app.use('/api/v1/reports', reportRoutes);

/*
 * The root route intentionally serves a neutral portal page. This gives the
 * project distinct user and administrator entry points without introducing a
 * full authentication flow yet.
 */
app.get('/', (_req, res) => {
  res.sendFile(portalPath);
});

/*
 * User and admin workspaces live in separate HTML files so each page only loads
 * the markup it needs. The shared frontend JavaScript (app.js) detects which
 * elements are present and binds behavior accordingly.
 */
app.get(['/user', '/user/'], (_req, res) => {
  res.sendFile(userPagePath);
});
app.get(['/admin', '/admin/'], (_req, res) => {
  res.sendFile(adminPagePath);
});

app.use(express.static(publicDir));
app.use(notFoundHandler);
app.use(errorHandler);
+11
View File
@@ -4,12 +4,23 @@ dotenv.config();
const requiredKeys = ['DB_HOST', 'DB_PORT', 'DB_NAME', 'DB_USER', 'DB_PASSWORD'];
/*
* Environment validation happens at module load time so configuration mistakes
* are discovered immediately. That is intentional because this service is small
* enough that there is no benefit in deferring a misconfiguration error until a
* later database call or request handler.
*/
for (const key of requiredKeys) {
if (!process.env[key]) {
throw new Error(`Missing required environment variable: ${key}`);
}
}
/*
* The exported env object becomes the single place where raw process variables
* are normalized into application-friendly types such as numbers. That keeps the
* rest of the codebase from repeating string-to-number conversion logic.
*/
export const env = {
port: Number(process.env.PORT || 3000),
db: {
+10
View File
@@ -2,6 +2,11 @@ import * as mariadb from 'mariadb';
import { env } from '../config/env.js';
/*
* One shared pool is enough for the current backend because the service is read-
* heavy and low volume. Centralizing pool creation here prevents each route or
* service module from opening its own connections and makes shutdown predictable.
*/
const pool = mariadb.createPool({
host: env.db.host,
port: env.db.port,
@@ -16,6 +21,11 @@ export async function query(sql, params = []) {
let connection;
try {
/*
* The helper deliberately exposes a low-level query primitive instead of a
* custom repository abstraction. For the PoC that keeps SQL visible and easy
* to reason about while still ensuring every query uses the same pool.
*/
connection = await pool.getConnection();
return await connection.query(sql, params);
} finally {
+35
View File
@@ -0,0 +1,35 @@
/*
 * Parameter validation middleware. Each route parameter is checked against a
 * safe whitelist pattern so obviously invalid identifiers are rejected with a
 * 400 before they reach service-layer code or database queries.
 */

// Short human-readable codes: letters, digits, underscore, hyphen (1-100 chars).
const SAFE_CODE_PATTERN = /^[a-zA-Z0-9_-]{1,100}$/;
// Canonical RFC 4122 UUID layout (8-4-4-4-12 hex groups, case-insensitive).
// Fix: the previous pattern (/^[a-f0-9-]{36}$/) accepted any 36-character mix
// of hex digits and hyphens — including a string of 36 hyphens — while
// rejecting valid uppercase UUIDs.
const SAFE_UUID_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

/**
 * Build middleware validating a single route parameter against a pattern.
 *
 * @param {string} paramName - Name of the route parameter to check.
 * @param {object} [options]
 * @param {RegExp|null} [options.pattern] - Explicit pattern override.
 * @returns {Function} Express middleware responding 400 on invalid input.
 */
export function validateParam(paramName, { pattern = null } = {}) {
  // Heuristic: parameter names containing "id" hold UUIDs; everything else is a code.
  const resolvedPattern = pattern || (paramName.toLowerCase().includes('id') ? SAFE_UUID_PATTERN : SAFE_CODE_PATTERN);
  return (req, res, next) => {
    const value = req.params[paramName];
    if (!value || !resolvedPattern.test(value)) {
      return res.status(400).json({ message: `Invalid parameter: ${paramName}` });
    }
    next();
  };
}

/**
 * Build middleware validating a non-negative integer route parameter.
 *
 * Fix: the previous check only required a finite non-negative Number, so
 * fractional values such as "1.5" passed through; version counters are whole
 * numbers, so non-integers are now rejected as well.
 *
 * @param {string} paramName - Name of the route parameter to check.
 * @returns {Function} Express middleware responding 400 on invalid input.
 */
export function validateNumericParam(paramName) {
  return (req, res, next) => {
    const value = Number(req.params[paramName]);
    if (!Number.isInteger(value) || value < 0) {
      return res.status(400).json({ message: `Invalid numeric parameter: ${paramName}` });
    }
    next();
  };
}
+113 -2
View File
@@ -3,22 +3,128 @@ import { Router } from 'express';
import {
getAppConfig,
getExportProfile,
getImageRules
getImageRules,
updateImageRules
} from '../services/configService.js';
import { logAuditEvent } from '../services/auditService.js';
import { asyncHandler } from '../utils/asyncHandler.js';
import { configCache } from '../services/cacheService.js';
const router = Router();
/*
* Image-rules validation is shared between server and admin frontend. The server
* acts as the final authority while the client validates proactively to give the
* administrator immediate feedback before the round-trip.
*/
/**
 * Validate a normalized image-rules payload.
 *
 * Returns the first applicable human-readable error message, or null when the
 * payload is acceptable. The server is the final authority; the admin frontend
 * runs equivalent checks proactively for faster feedback.
 *
 * @param {object} payload - Coerced image-rules candidate.
 * @returns {string|null} Error message, or null if valid.
 */
function validateImageRulesPayload(payload) {
  const oversizeBehaviors = ['auto_optimize', 'warn_then_optimize', 'block'];
  const mimeTypes = Array.isArray(payload.allowedMimeTypes)
    ? payload.allowedMimeTypes.filter((entry) => typeof entry === 'string' && entry.trim())
    : [];
  const isPositiveInt = (candidate) => Number.isInteger(candidate) && candidate > 0;

  // Ordered rule table: the first failing predicate decides the message.
  const rules = [
    [() => Boolean(payload.name) && typeof payload.name === 'string', 'Image policy name is required.'],
    [() => mimeTypes.length > 0, 'At least one allowed MIME type is required.'],
    [() => isPositiveInt(payload.maxFileSizeBytes), 'Maximum file size must be a positive integer.'],
    [() => isPositiveInt(payload.maxWidthPx), 'Maximum width must be a positive integer.'],
    [() => isPositiveInt(payload.maxHeightPx), 'Maximum height must be a positive integer.'],
    [
      () => Number.isInteger(payload.jpegQuality) && payload.jpegQuality >= 1 && payload.jpegQuality <= 100,
      'JPEG quality must be an integer between 1 and 100.'
    ],
    [() => oversizeBehaviors.includes(payload.oversizeBehavior), 'Oversize behavior is invalid.'],
    [() => isPositiveInt(payload.maxAttachmentsPerField), 'Maximum attachments per field must be a positive integer.']
  ];

  for (const [passes, message] of rules) {
    if (!passes()) {
      return message;
    }
  }
  return null;
}
router.get(
  '/image-rules',
  asyncHandler(async (_req, res) => {
    /*
     * Serve from the short-lived config cache when possible; the admin PUT
     * invalidates the 'image-rules' key so updates become visible immediately.
     *
     * Fix: a leftover pre-refactor `res.json(imageRules)` sat before the
     * cache write and final return, so a cache miss responded twice and threw
     * ERR_HTTP_HEADERS_SENT on the second send.
     */
    const cached = configCache.get('image-rules');
    if (cached) {
      return res.json(cached);
    }
    const imageRules = await getImageRules();
    if (!imageRules) {
      return res.status(404).json({ message: 'Image rules not found.' });
    }
    configCache.set('image-rules', imageRules);
    return res.json(imageRules);
  })
);
router.put(
  '/image-rules',
  asyncHandler(async (req, res) => {
    /*
     * Coerce the raw request body into the shape the validator expects. The
     * browser form submits numeric settings, but over HTTP they arrive as
     * strings until explicitly converted.
     */
    const body = req.body ?? {};
    const rawMimeTypes = Array.isArray(body.allowedMimeTypes) ? body.allowedMimeTypes : [];
    const payload = {
      name: body.name?.trim(),
      allowedMimeTypes: rawMimeTypes.map((entry) => String(entry).trim()).filter(Boolean),
      maxFileSizeBytes: Number(body.maxFileSizeBytes),
      maxWidthPx: Number(body.maxWidthPx),
      maxHeightPx: Number(body.maxHeightPx),
      jpegQuality: Number(body.jpegQuality),
      oversizeBehavior: body.oversizeBehavior,
      maxAttachmentsPerField: Number(body.maxAttachmentsPerField)
    };

    const validationMessage = validateImageRulesPayload(payload);
    if (validationMessage) {
      return res.status(400).json({ message: validationMessage });
    }

    // Snapshot the pre-update rules so the audit entry records the old value.
    const previousRules = await getImageRules();
    const imageRules = await updateImageRules(payload);
    if (!imageRules) {
      return res.status(404).json({ message: 'Image rules not found.' });
    }

    // Drop the cached copy so the next GET reflects the new policy at once.
    configCache.invalidate('image-rules');
    await logAuditEvent({
      entityType: 'image_rules',
      entityCode: imageRules.code,
      action: 'update',
      oldValue: previousRules,
      newValue: imageRules
    });
    return res.json(imageRules);
  })
);
@@ -38,6 +144,11 @@ router.get(
router.get(
'/app-config',
asyncHandler(async (_req, res) => {
/*
* Generic application configuration is kept as a simple key/value list in
* the PoC. This avoids hardcoding small behavioral settings in the frontend
* while still keeping the schema easy to inspect and evolve.
*/
const config = await getAppConfig();
res.json({ items: config });
})
+5
View File
@@ -8,6 +8,11 @@ const router = Router();
router.get(
'/',
asyncHandler(async (_req, res) => {
/*
* The health endpoint checks the database on purpose instead of only proving
* that Express can answer HTTP. In this project, the server is not useful if
* MariaDB is unavailable, so health must include that dependency.
*/
await query('SELECT 1 AS ok');
res.json({
+30 -1
View File
@@ -2,26 +2,55 @@ import { Router } from 'express';
import { getLookup, listLookups } from '../services/lookupService.js';
import { asyncHandler } from '../utils/asyncHandler.js';
import { validateParam } from '../middleware/validateParams.js';
import { lookupCache } from '../services/cacheService.js';
const router = Router();
router.get(
  '/',
  asyncHandler(async (_req, res) => {
    /*
     * The bulk lookup endpoint is convenient for client startup because dropdown
     * lists are small and reused across many dynamic fields. Fetching them in one
     * call keeps the frontend startup sequence short.
     *
     * Fix: a leftover pre-refactor `res.json({ items: lookups })` sat before
     * the cache write and final return, so a cache miss responded twice and
     * threw ERR_HTTP_HEADERS_SENT on the second send.
     */
    const cached = lookupCache.get('all-lookups');
    if (cached) {
      return res.json(cached);
    }
    const lookups = await listLookups();
    const payload = { items: lookups };
    lookupCache.set('all-lookups', payload);
    return res.json(payload);
  })
);
router.get(
  '/:lookupCode',
  validateParam('lookupCode'),
  asyncHandler(async (req, res) => {
    /*
     * The single-lookup endpoint is still useful for debugging and for possible
     * future optimization if the number of lookup sets grows and startup
     * payloads need to become more selective.
     */
    const { lookupCode } = req.params;
    const cacheKey = `lookup-${lookupCode}`;

    const hit = lookupCache.get(cacheKey);
    if (hit) {
      return res.json(hit);
    }

    const lookup = await getLookup(lookupCode);
    if (!lookup) {
      return res.status(404).json({ message: 'Lookup not found.' });
    }
    lookupCache.set(cacheKey, lookup);
    return res.json(lookup);
  })
);
+75
View File
@@ -0,0 +1,75 @@
import { Router } from 'express';
import { getReport, listReports, submitReport } from '../services/reportService.js';
import { logAuditEvent } from '../services/auditService.js';
import { asyncHandler } from '../utils/asyncHandler.js';
import { validateParam } from '../middleware/validateParams.js';
const router = Router();
/*
* Report submission accepts the full local report payload (answers, template
* binding, status) and stores it server-side. This bridges the offline-first
* client workflow with centralized storage for review and archival.
*/
router.get(
  '/',
  asyncHandler(async (req, res) => {
    /*
     * Pagination inputs come straight from the query string and must be
     * coerced and clamped before reaching SQL.
     *
     * Fix: `Number(req.query.limit) || 100` let negative and fractional
     * limits (e.g. ?limit=-5) flow into the SQL LIMIT clause. Both values are
     * now truncated to integers; limit is clamped to [1, 500] and offset to
     * [0, ∞), with non-numeric input falling back to the defaults (100 / 0).
     */
    const reports = await listReports({
      status: req.query.status || undefined,
      templateCode: req.query.templateCode || undefined,
      limit: Math.min(Math.max(Math.trunc(Number(req.query.limit)) || 100, 1), 500),
      offset: Math.max(Math.trunc(Number(req.query.offset)) || 0, 0)
    });
    res.json({ items: reports });
  })
);
router.get(
  '/:reportId',
  validateParam('reportId'),
  asyncHandler(async (req, res) => {
    // Look up one submitted report by its client-generated UUID; 404 on miss.
    const found = await getReport(req.params.reportId);
    if (!found) {
      return res.status(404).json({ message: 'Report not found.' });
    }
    return res.json(found);
  })
);
router.post(
  '/',
  asyncHandler(async (req, res) => {
    /*
     * Accept the full local report payload (answers, template binding, status)
     * and persist it server-side, bridging the offline-first client workflow
     * with centralized storage.
     */
    const body = req.body;
    if (!body?.id || !body?.reportNumber || !body?.templateCode || !body?.templateVersion || !body?.answers) {
      return res.status(400).json({
        message: 'id, reportNumber, templateCode, templateVersion, and answers are required.'
      });
    }

    /*
     * Fix: templateVersion was coerced with Number() but never checked, so a
     * truthy non-numeric string (e.g. "abc") produced NaN and surfaced as an
     * opaque database error instead of a 400.
     */
    const templateVersion = Number(body.templateVersion);
    if (!Number.isInteger(templateVersion) || templateVersion <= 0) {
      return res.status(400).json({ message: 'templateVersion must be a positive integer.' });
    }

    const report = await submitReport({
      id: String(body.id).trim(),
      reportNumber: String(body.reportNumber).trim(),
      templateCode: String(body.templateCode).trim(),
      templateVersion,
      status: body.status || 'exported',
      answers: body.answers
    });
    await logAuditEvent({
      entityType: 'report',
      entityCode: report.id,
      action: 'submit',
      newValue: { reportNumber: report.reportNumber, templateCode: report.templateCode }
    });
    return res.status(201).json(report);
  })
);
export default router;
+99 -4
View File
@@ -2,37 +2,100 @@ import { Router } from 'express';
import {
getActiveTemplate,
getAllActiveTemplates,
getTemplateVersion,
listTemplates
listTemplates,
listTemplateVersions,
publishTemplateVersion
} from '../services/templateService.js';
import { logAuditEvent } from '../services/auditService.js';
import { asyncHandler } from '../utils/asyncHandler.js';
import { validateParam, validateNumericParam } from '../middleware/validateParams.js';
import { templateCache } from '../services/cacheService.js';
const router = Router();
router.get(
  '/',
  asyncHandler(async (req, res) => {
    /*
     * When ?include=definitions is set the response embeds the full JSON
     * definition for every active template. This eliminates the N+1 round-trip
     * the old client performed (list → fetch each) and makes initial sync a
     * single request. Without the flag the response stays lightweight.
     *
     * Fix: this handler previously contained leftover pre-refactor lines — a
     * duplicate `asyncHandler(async (_req, res) => {` opening and a stray
     * `res.json({ items: templates })` before the cache write — leaving the
     * block syntactically broken and double-responding. The merged handler
     * below keeps one signature and one response per path.
     */
    const includeDefinitions = req.query.include === 'definitions';
    const cacheKey = `templates-list-${includeDefinitions}`;

    const cached = templateCache.get(cacheKey);
    if (cached) {
      return res.json(cached);
    }

    const templates = includeDefinitions ? await getAllActiveTemplates() : await listTemplates();
    const payload = { items: templates };
    templateCache.set(cacheKey, payload);
    return res.json(payload);
  })
);
router.get(
  '/:templateCode',
  validateParam('templateCode'),
  asyncHandler(async (req, res) => {
    /*
     * New reports always use the latest active template, so this primary route
     * is optimized for that case. Older versions remain reachable through the
     * versioned route so existing drafts stay bound to their original schema.
     */
    const { templateCode } = req.params;
    const cacheKey = `template-active-${templateCode}`;

    const hit = templateCache.get(cacheKey);
    if (hit) {
      return res.json(hit);
    }

    const template = await getActiveTemplate(templateCode);
    if (!template) {
      return res.status(404).json({ message: 'Template not found.' });
    }
    templateCache.set(cacheKey, template);
    return res.json(template);
  })
);
router.get(
  '/:templateCode/versions',
  validateParam('templateCode'),
  asyncHandler(async (req, res) => {
    /*
     * Version listing lets the admin workspace display a template's publication
     * history and choose which version to activate or review.
     *
     * Fix: a stray pre-refactor path string ('/:templateCode/versions/:versionNumber',)
     * preceded the real path argument, which would have broken the route
     * registration; only the intended path remains.
     */
    const versions = await listTemplateVersions(req.params.templateCode);
    return res.json({ items: versions });
  })
);
router.get(
'/:templateCode/versions/:versionNumber',
validateParam('templateCode'),
validateNumericParam('versionNumber'),
asyncHandler(async (req, res) => {
/*
* Version-specific access is what allows the frontend to reopen old drafts
* safely even after templates evolve. Without this route, cached reports
* would eventually drift away from the structure they were created against.
*/
const template = await getTemplateVersion(
req.params.templateCode,
req.params.versionNumber
@@ -46,4 +109,36 @@ router.get(
})
);
router.put(
  '/:templateCode/versions/:versionNumber/publish',
  validateParam('templateCode'),
  validateNumericParam('versionNumber'),
  asyncHandler(async (req, res) => {
    /*
     * Publishing a version marks it active and retires the previously active
     * version of the same template, letting the admin promote a draft to
     * production. Reports keep the version they were bound to.
     */
    const { templateCode, versionNumber } = req.params;
    const version = Number(versionNumber);

    const result = await publishTemplateVersion(templateCode, version);
    if (!result) {
      return res.status(404).json({ message: 'Template version not found.' });
    }

    // Activation changes which definition every cached payload should show,
    // so the whole template cache is dropped rather than guessing at keys.
    templateCache.clear();
    await logAuditEvent({
      entityType: 'template_version',
      entityCode: `${templateCode}::v${versionNumber}`,
      action: 'publish',
      newValue: { templateCode, version }
    });
    return res.json(result);
  })
);
export default router;
+14
View File
@@ -3,6 +3,11 @@ import { env } from './config/env.js';
import { closePool, query } from './db/pool.js';
async function startServer() {
/*
* Fail fast on startup if the database is not reachable. For this project that
* is preferable to serving the frontend with a broken API because templates,
* lookups, and administrator configuration all depend on MariaDB.
*/
await query('SELECT 1 AS ok');
const server = app.listen(env.port, () => {
@@ -10,6 +15,11 @@ async function startServer() {
});
async function shutdown(signal) {
/*
* Graceful shutdown matters even in a PoC because Docker restarts and local
* stop/start cycles are common during development. Closing the HTTP server
* first and then the connection pool avoids abruptly dropping active work.
*/
console.log(`Received ${signal}, shutting down...`);
server.close(async () => {
await closePool();
@@ -22,6 +32,10 @@ async function startServer() {
}
startServer().catch(async (error) => {
/*
* If startup fails after the pool has been created, explicitly end the pool so
* the process does not linger with open handles and confusing partial state.
*/
console.error('Failed to start server');
console.error(error);
await closePool();
+75
View File
@@ -0,0 +1,75 @@
import { query } from '../db/pool.js';
/*
* The audit service records every administrative mutation so the team can trace
* when configuration changed and what the previous value was. Each row captures
* the entity type (e.g. "image_rules"), the entity identifier, the action name,
* and JSON snapshots of the old and new values.
*/
/**
 * Append one audit row describing an administrative mutation.
 *
 * @param {object} event
 * @param {string} event.entityType - Kind of entity touched (e.g. "image_rules", "report").
 * @param {string} event.entityCode - Identifier of the specific entity.
 * @param {string} event.action - Verb describing the mutation (e.g. "update", "publish").
 * @param {object|null} [event.oldValue] - Snapshot before the change; stored as JSON text.
 * @param {object|null} [event.newValue] - Snapshot after the change; stored as JSON text.
 * @returns {Promise<void>}
 */
export async function logAuditEvent({ entityType, entityCode, action, oldValue = null, newValue = null }) {
  await query(
    `
    INSERT INTO audit_log (entity_type, entity_code, action, old_value_json, new_value_json)
    VALUES (?, ?, ?, ?, ?)
    `,
    [
      entityType,
      entityCode,
      action,
      // Null/undefined snapshots are stored as SQL NULL rather than the string "null".
      oldValue ? JSON.stringify(oldValue) : null,
      newValue ? JSON.stringify(newValue) : null
    ]
  );
}
/**
 * Read recent audit entries, newest first, with optional entity filters.
 *
 * @param {object} [options]
 * @param {string} [options.entityType] - Restrict to one entity type.
 * @param {string} [options.entityCode] - Restrict to one entity identifier.
 * @param {number} [options.limit=50] - Maximum rows returned.
 * @returns {Promise<object[]>} Audit entries with parsed old/new snapshots.
 */
export async function getAuditLog({ entityType, entityCode, limit = 50 } = {}) {
  const filters = [];
  const params = [];

  if (entityType) {
    filters.push('entity_type = ?');
    params.push(entityType);
  }
  if (entityCode) {
    filters.push('entity_code = ?');
    params.push(entityCode);
  }

  // Assemble the statement from base query + optional WHERE + ordering/limit.
  const whereClause = filters.length ? ` WHERE ${filters.join(' AND ')}` : '';
  const sql =
    'SELECT id, entity_type, entity_code, action, old_value_json, new_value_json, created_at FROM audit_log' +
    whereClause +
    ' ORDER BY id DESC LIMIT ?';
  params.push(limit);

  const rows = await query(sql, params);
  return rows.map((row) => ({
    id: row.id,
    entityType: row.entity_type,
    entityCode: row.entity_code,
    action: row.action,
    oldValue: safeParseJson(row.old_value_json),
    newValue: safeParseJson(row.new_value_json),
    createdAt: row.created_at
  }));
}
/**
 * Best-effort JSON column decoder: returns null for null/undefined input,
 * passes objects through unchanged (the driver may pre-parse JSON columns),
 * and returns null instead of throwing on malformed JSON text.
 *
 * @param {*} value - Raw column value.
 * @returns {object|Array|number|string|boolean|null} Parsed value or null.
 */
function safeParseJson(value) {
  if (value == null) {
    return null;
  }
  if (typeof value !== 'object') {
    try {
      return JSON.parse(value);
    } catch {
      return null;
    }
  }
  return value;
}
+70
View File
@@ -0,0 +1,70 @@
/*
* Simple in-memory LRU cache for read-heavy data such as templates and lookups.
* Each cache entry tracks its last-access timestamp. When the cache exceeds the
* configured maximum size the least-recently-used entry is evicted automatically.
*
* This avoids hitting MariaDB on every request for data that changes rarely while
* keeping the implementation dependency-free.
*/
/**
 * Create a bounded, TTL-based in-memory cache with LRU eviction.
 *
 * @param {object} [options]
 * @param {number} [options.maxEntries=100] - Maximum number of entries held.
 * @param {number} [options.ttlMs=300000] - Entry lifetime in milliseconds.
 * @returns {{get: Function, set: Function, invalidate: Function, clear: Function}}
 */
export function createCache({ maxEntries = 100, ttlMs = 5 * 60 * 1000 } = {}) {
  const store = new Map();

  // Return the cached value, or undefined on miss/expiry. Refreshes LRU order.
  function get(key) {
    const entry = store.get(key);
    if (!entry) {
      return undefined;
    }
    if (Date.now() - entry.createdAt > ttlMs) {
      store.delete(key);
      return undefined;
    }
    entry.lastAccess = Date.now();
    return entry.value;
  }

  function set(key, value) {
    /*
     * Fix: only evict when inserting a NEW key at capacity. Previously set()
     * evicted on every call once full, so refreshing an existing key threw
     * out an unrelated — and still valid — entry.
     */
    if (!store.has(key) && store.size >= maxEntries) {
      evictLru();
    }
    store.set(key, { value, createdAt: Date.now(), lastAccess: Date.now() });
  }

  function invalidate(key) {
    store.delete(key);
  }

  function clear() {
    store.clear();
  }

  // Remove the entry with the oldest lastAccess timestamp (ties resolve to
  // the earliest-inserted entry, thanks to Map's insertion-order iteration).
  function evictLru() {
    let oldestKey = null;
    let oldestAccess = Infinity;
    for (const [key, entry] of store) {
      if (entry.lastAccess < oldestAccess) {
        oldestAccess = entry.lastAccess;
        oldestKey = key;
      }
    }
    if (oldestKey !== null) {
      store.delete(oldestKey);
    }
  }

  return { get, set, invalidate, clear };
}
/*
 * Shared cache instances. Templates and lookups change rarely enough that a
 * five-minute TTL is practical during normal operations; configuration uses a
 * shorter two-minute TTL and a smaller capacity. Writers invalidate the
 * relevant cache key (or clear the cache) on mutation so changes appear
 * immediately despite the TTL.
 */
export const templateCache = createCache({ maxEntries: 50, ttlMs: 5 * 60 * 1000 });
export const lookupCache = createCache({ maxEntries: 50, ttlMs: 5 * 60 * 1000 });
export const configCache = createCache({ maxEntries: 20, ttlMs: 2 * 60 * 1000 });
+54
View File
@@ -1,6 +1,12 @@
import { query } from '../db/pool.js';
import { parseJsonColumn } from '../utils/json.js';
/*
* Phase 1 keeps exactly one active image rule set. The frontend asks only for
* the active rule because that matches the current business need: operators use
* the latest centrally managed policy, while drafts themselves do not yet store
* an immutable copy of the rule configuration.
*/
export async function getImageRules() {
const rows = await query(
`
@@ -31,6 +37,49 @@ export async function getImageRules() {
};
}
/**
 * Overwrite the currently active image-rules row with new settings.
 *
 * @param {object} nextImageRules - Validated, normalized rule payload
 *   (name, allowedMimeTypes, size/dimension limits, jpegQuality,
 *   oversizeBehavior, maxAttachmentsPerField).
 * @returns {Promise<object|null>} The freshly re-read rules, or null when no
 *   active rule row exists to update.
 */
export async function updateImageRules(nextImageRules) {
  // Resolve the active row first; its code is the UPDATE key below.
  const currentRules = await getImageRules();
  if (!currentRules) {
    return null;
  }
  /*
   * The PoC updates the currently active rule in place instead of creating a new
   * version row. That keeps the administrator flow small and easy to reason
   * about. If later phases need audit history, this is the point where versioned
   * writes or soft-retired rows should be introduced.
   */
  await query(
    `
    UPDATE image_rules
    SET
    name = ?,
    allowed_mime_types_json = ?,
    max_file_size_bytes = ?,
    max_width_px = ?,
    max_height_px = ?,
    jpeg_quality = ?,
    oversize_behavior = ?,
    max_attachments_per_field = ?
    WHERE code = ?
    `,
    [
      nextImageRules.name,
      JSON.stringify(nextImageRules.allowedMimeTypes),
      nextImageRules.maxFileSizeBytes,
      nextImageRules.maxWidthPx,
      nextImageRules.maxHeightPx,
      nextImageRules.jpegQuality,
      nextImageRules.oversizeBehavior,
      nextImageRules.maxAttachmentsPerField,
      currentRules.code
    ]
  );
  // Re-read so the caller receives the same mapped shape as getImageRules().
  return getImageRules();
}
export async function getExportProfile() {
const rows = await query(
`
@@ -52,6 +101,11 @@ export async function getExportProfile() {
}
export async function getAppConfig() {
/*
* Config values are stored as JSON so the frontend can receive structured data
* without a separate table for every small setting. The helper converts JSON
* strings into usable objects and arrays before returning them.
*/
const rows = await query(
`
SELECT
+14
View File
@@ -1,6 +1,11 @@
import { query } from '../db/pool.js';
function groupLookups(rows) {
/*
* SQL returns one row per lookup value, but the frontend wants a grouped shape
* where each lookup code owns an array of options. Building that structure here
* keeps the API contract friendly for dynamic form rendering.
*/
const lookups = new Map();
for (const row of rows) {
@@ -26,6 +31,10 @@ function groupLookups(rows) {
}
export async function listLookups() {
/*
* Active lookup values are sorted in SQL so the client receives them in display
* order without additional sorting logic in the browser.
*/
const rows = await query(
`
SELECT
@@ -48,6 +57,11 @@ export async function listLookups() {
}
export async function getLookup(lookupCode) {
/*
* The single-lookup query reuses the same grouping logic as the bulk endpoint,
* which keeps the returned shape consistent regardless of how the frontend or a
* debugging tool chooses to retrieve lookup data.
*/
const rows = await query(
`
SELECT
+108
View File
@@ -0,0 +1,108 @@
import { query } from '../db/pool.js';
import { parseJsonColumn } from '../utils/json.js';
/*
* The report service handles server-side storage of submitted reports. In
* phase 1, reports are created locally in the browser and only uploaded when
* the operator explicitly submits. This keeps the offline-first workflow intact
* while giving the backend a durable copy for review, export, or archival.
*/
/**
 * Persist a submitted report, updating in place when the same UUID was
 * submitted before (ON DUPLICATE KEY upsert — assumes report_uuid carries a
 * UNIQUE constraint in the schema; confirm against the migration).
 *
 * @param {object} report - Normalized report payload from the route layer.
 * @param {string} report.id - Client-generated report UUID.
 * @param {string} report.reportNumber
 * @param {string} report.templateCode
 * @param {number} report.templateVersion
 * @param {string} report.status
 * @param {object} report.answers - Serialized to JSON for answers_json.
 * @returns {Promise<object|null>} The stored report re-read via getReport.
 */
export async function submitReport(report) {
  await query(
    `
    INSERT INTO reports (report_uuid, report_number, template_code, template_version, status, answers_json, submitted_at)
    VALUES (?, ?, ?, ?, ?, ?, NOW())
    ON DUPLICATE KEY UPDATE
    status = VALUES(status),
    answers_json = VALUES(answers_json),
    submitted_at = VALUES(submitted_at),
    updated_at = NOW()
    `,
    [
      report.id,
      report.reportNumber,
      report.templateCode,
      report.templateVersion,
      report.status,
      JSON.stringify(report.answers)
    ]
  );
  // Re-read so the caller receives the canonical mapped row shape.
  return getReport(report.id);
}
/**
 * Fetch a single report by its UUID.
 *
 * @param {string} reportUuid - Client-generated report UUID.
 * @returns {Promise<object|null>} Mapped report (see mapReportRow) or null
 *   when no row matches.
 */
export async function getReport(reportUuid) {
  const rows = await query(
    `
    SELECT
    report_uuid AS reportUuid,
    report_number AS reportNumber,
    template_code AS templateCode,
    template_version AS templateVersion,
    status,
    answers_json AS answersJson,
    submitted_at AS submittedAt,
    created_at AS createdAt,
    updated_at AS updatedAt
    FROM reports
    WHERE report_uuid = ?
    LIMIT 1
    `,
    [reportUuid]
  );
  return rows.length ? mapReportRow(rows[0]) : null;
}
/**
 * List submitted reports, most-recently-updated first, with optional filters
 * and offset pagination.
 *
 * @param {object} [options]
 * @param {string} [options.status] - Filter by report status when provided.
 * @param {string} [options.templateCode] - Filter by template code when provided.
 * @param {number} [options.limit=100] - Maximum rows returned (callers should
 *   pass a sanitized positive integer; the value goes into SQL LIMIT).
 * @param {number} [options.offset=0] - Rows skipped (goes into SQL OFFSET).
 * @returns {Promise<object[]>} Mapped report objects (see mapReportRow).
 */
export async function listReports({ status, templateCode, limit = 100, offset = 0 } = {}) {
  let sql = `
    SELECT
    report_uuid AS reportUuid,
    report_number AS reportNumber,
    template_code AS templateCode,
    template_version AS templateVersion,
    status,
    answers_json AS answersJson,
    submitted_at AS submittedAt,
    created_at AS createdAt,
    updated_at AS updatedAt
    FROM reports
  `;
  const params = [];
  const clauses = [];
  if (status) {
    clauses.push('status = ?');
    params.push(status);
  }
  if (templateCode) {
    clauses.push('template_code = ?');
    params.push(templateCode);
  }
  // Filters are combined with AND; both are parameterized, never interpolated.
  if (clauses.length) {
    sql += ` WHERE ${clauses.join(' AND ')}`;
  }
  sql += ' ORDER BY updated_at DESC LIMIT ? OFFSET ?';
  params.push(limit, offset);
  const rows = await query(sql, params);
  return rows.map(mapReportRow);
}
/**
 * Translate an aliased reports row into the API's report shape, parsing the
 * stored answers JSON into an object (empty object on missing/invalid JSON,
 * per parseJsonColumn's fallback argument).
 *
 * @param {object} row - Row with camelCase column aliases from the SELECT.
 * @returns {object} API-facing report object.
 */
function mapReportRow(row) {
  const {
    reportUuid,
    reportNumber,
    templateCode,
    templateVersion,
    status,
    answersJson,
    submittedAt,
    createdAt,
    updatedAt
  } = row;
  return {
    id: reportUuid,
    reportNumber,
    templateCode,
    templateVersion,
    status,
    answers: parseJsonColumn(answersJson, {}),
    submittedAt,
    createdAt,
    updatedAt
  };
}
+118
View File
@@ -2,6 +2,11 @@ import { query } from '../db/pool.js';
import { parseJsonColumn } from '../utils/json.js';
function mapTemplateRow(row) {
/*
* Template definitions are stored as JSON in MariaDB, but the frontend expects
* them as native objects. The mapper centralizes that translation and keeps the
* route handlers free from storage-specific details.
*/
return {
code: row.code,
name: row.name,
@@ -14,6 +19,11 @@ function mapTemplateRow(row) {
}
export async function listTemplates() {
/*
* Only active versions are listed for new report creation. Retired or draft
* versions may still exist in the database, but they should not appear in the
* main template picker used by operators.
*/
const rows = await query(
`
SELECT
@@ -40,7 +50,40 @@ export async function listTemplates() {
}));
}
/**
 * Return every active template with its full JSON definition in one query.
 *
 * Batch endpoint backing `GET /api/v1/templates?include=definitions`: replaces
 * the N+1 pattern where the client listed templates then fetched each
 * definition individually, cutting initial sync to a single round-trip.
 *
 * @returns {Promise<object[]>} Mapped templates (see mapTemplateRow), ordered
 *   by template name.
 */
export async function getAllActiveTemplates() {
  const rows = await query(
    `
    SELECT
    t.code,
    t.name,
    t.description,
    tv.version_number AS versionNumber,
    tv.status,
    tv.published_at AS publishedAt,
    tv.definition_json AS definitionJson
    FROM templates t
    INNER JOIN template_versions tv
    ON tv.template_id = t.id
    AND tv.status = 'active'
    ORDER BY t.name ASC
    `
  );
  return rows.map(mapTemplateRow);
}
export async function getActiveTemplate(templateCode) {
/*
* This query returns the single currently active version for a given template
* code. That matches the business rule that new drafts should always start from
* the newest active template definition.
*/
const rows = await query(
`
SELECT
@@ -65,6 +108,11 @@ export async function getActiveTemplate(templateCode) {
}
export async function getTemplateVersion(templateCode, versionNumber) {
/*
* Version-specific reads are intentionally separate from active-template reads
* so draft reopening can be explicit and reliable, even if the active version
* changes later.
*/
const rows = await query(
`
SELECT
@@ -87,3 +135,73 @@ export async function getTemplateVersion(templateCode, versionNumber) {
return rows.length ? mapTemplateRow(rows[0]) : null;
}
/**
 * List every version of a template, newest first, so the admin workspace can
 * show the publication history and pick a version to activate or review.
 *
 * @param {string} templateCode - Stable template identifier.
 * @returns {Promise<object[]>} Lightweight version descriptors
 *   ({ code, name, version, status, publishedAt }); empty array when the
 *   template code is unknown.
 */
export async function listTemplateVersions(templateCode) {
  const rows = await query(
    `
    SELECT
    t.code,
    t.name,
    tv.version_number AS versionNumber,
    tv.status,
    tv.published_at AS publishedAt
    FROM templates t
    INNER JOIN template_versions tv
    ON tv.template_id = t.id
    WHERE t.code = ?
    ORDER BY tv.version_number DESC
    `,
    [templateCode]
  );
  // Deliberately omits definition_json: history views only need metadata.
  return rows.map((row) => ({
    code: row.code,
    name: row.name,
    version: row.versionNumber,
    status: row.status,
    publishedAt: row.publishedAt
  }));
}
/**
 * Activate a specific template version, retiring the previously active one.
 *
 * @param {string} templateCode - Stable template identifier.
 * @param {number} versionNumber - Version to promote to active.
 * @returns {Promise<object|null>} The newly active template (via
 *   getActiveTemplate), or null when the template or version does not exist.
 */
export async function publishTemplateVersion(templateCode, versionNumber) {
  /*
   * Publishing retires the currently active version and activates the requested
   * one. Both updates run sequentially. In production this would be wrapped in a
   * database transaction; the PoC trades strict atomicity for simplicity —
   * note the brief window where no version is active if the process dies
   * between the two UPDATEs.
   */
  const templateRows = await query(
    'SELECT id FROM templates WHERE code = ?',
    [templateCode]
  );
  if (!templateRows.length) {
    return null;
  }
  const templateId = templateRows[0].id;
  // Resolve the target version before mutating anything, so an unknown
  // version number leaves the current active version untouched.
  const versionRows = await query(
    'SELECT id FROM template_versions WHERE template_id = ? AND version_number = ?',
    [templateId, versionNumber]
  );
  if (!versionRows.length) {
    return null;
  }
  await query(
    "UPDATE template_versions SET status = 'retired' WHERE template_id = ? AND status = 'active'",
    [templateId]
  );
  await query(
    "UPDATE template_versions SET status = 'active', published_at = NOW() WHERE id = ?",
    [versionRows[0].id]
  );
  return getActiveTemplate(templateCode);
}