Working version before modification.

This commit is contained in:
Stan
2026-04-20 21:04:54 +02:00
parent 28d167f11f
commit e7127f3215
30 changed files with 7046 additions and 1201 deletions
+1788
View File
File diff suppressed because it is too large.
+283
View File
@@ -0,0 +1,283 @@
/*
* exif.js — Lightweight EXIF parser for JPEG images in the browser.
*
* Extracts basic EXIF metadata (camera make/model, date taken, GPS coords,
* orientation, dimensions) from a base64 dataUrl or ArrayBuffer.
*
* This is a minimal parser — it handles the most common IFD0 and GPS tags
* without pulling in a full library.
*/
/* ── EXIF Tag IDs ───────────────────────────────────────────────────────── */
/* EXIF tag IDs (IFD0 + EXIF sub-IFD) mapped to friendly property names,
 * listed in ascending tag-ID order. */
const TAGS = {
  0x010F: 'make',              // camera manufacturer
  0x0110: 'model',             // camera model
  0x0112: 'orientation',       // rotation/flip flag
  0x011A: 'xResolution',
  0x011B: 'yResolution',
  0x0132: 'dateTime',          // file change date/time
  0x829A: 'exposureTime',
  0x829D: 'fNumber',
  0x8769: 'exifOffset',        // pointer to the EXIF sub-IFD
  0x8825: 'gpsOffset',         // pointer to the GPS IFD
  0x8827: 'isoSpeed',
  0x9003: 'dateTimeOriginal',  // when the photo was taken
  0x9004: 'dateTimeDigitized',
  0x920A: 'focalLength',
  0xA002: 'pixelXDimension',
  0xA003: 'pixelYDimension',
};
/* GPS IFD tag IDs mapped to friendly property names. */
const GPS_TAGS = {
  0x0001: 'latRef',  // hemisphere reference for latitude
  0x0002: 'lat',     // latitude as [deg, min, sec] rationals
  0x0003: 'lonRef',  // hemisphere reference for longitude
  0x0004: 'lon',     // longitude as [deg, min, sec] rationals
  0x0005: 'altRef',  // 1 negates the altitude (see parseExifData)
  0x0006: 'alt',     // altitude value
};
/* ── Public API ─────────────────────────────────────────────────────────── */
/**
* Parse EXIF from a base64 dataUrl string.
* Returns an object with parsed tags, or null if no EXIF found.
*/
/**
 * Parse EXIF metadata out of a base64-encoded JPEG dataUrl.
 *
 * @param {string} dataUrl - A `data:image/jpeg;base64,...` string.
 * @returns {Object|null} Parsed tag object, or null when the input is not a
 *   JPEG dataUrl or the base64 payload cannot be decoded.
 */
export function parseExifFromDataUrl(dataUrl) {
  if (!dataUrl || !dataUrl.startsWith('data:image/jpeg')) return null;
  try {
    const encoded = dataUrl.split(',')[1];
    const decoded = atob(encoded);
    /* atob yields a binary string (one byte per char) — repack into bytes. */
    const bytes = Uint8Array.from(decoded, (ch) => ch.charCodeAt(0));
    return parseExif(bytes.buffer);
  } catch {
    /* Malformed base64 or truncated payload — treat as "no EXIF". */
    return null;
  }
}
/**
* Parse EXIF from an ArrayBuffer.
* Returns an object with parsed tags, or null if no EXIF found.
*/
/**
 * Parse EXIF metadata from a JPEG ArrayBuffer.
 *
 * Scans the segment stream for the APP1 (EXIF) marker and hands its payload
 * to parseExifData.
 *
 * @param {ArrayBuffer} arrayBuffer - Raw JPEG bytes.
 * @returns {Object|null} Parsed tag object, or null if the buffer is not a
 *   JPEG or contains no APP1 segment.
 */
export function parseExif(arrayBuffer) {
  const view = new DataView(arrayBuffer);
  /* Fix: guard tiny buffers — previously a buffer shorter than 2 bytes made
     getUint16(0) throw a RangeError instead of returning null. Mirrors the
     identical check in extractExifSegment. */
  if (view.byteLength < 4) return null;
  /* Check JPEG SOI marker */
  if (view.getUint16(0) !== 0xFFD8) return null;
  /* Walk the segment stream looking for APP1 (0xFFE1). */
  let offset = 2;
  while (offset < view.byteLength - 4) {
    const marker = view.getUint16(offset);
    if (marker === 0xFFE1) {
      /* Found APP1 — its length field includes itself (2 bytes). */
      const length = view.getUint16(offset + 2);
      return parseExifData(view, offset + 4, length - 2);
    }
    /* Every marker starts with 0xFF; anything else means lost sync. */
    if ((marker & 0xFF00) !== 0xFF00) break;
    const segLen = view.getUint16(offset + 2);
    offset += 2 + segLen;
  }
  return null;
}
/* ── Internal parsing ───────────────────────────────────────────────────── */
/**
 * Parse the APP1 payload: verify the "Exif" identifier and TIFF header, then
 * walk IFD0, the EXIF sub-IFD, and the GPS IFD.
 *
 * @param {DataView} view - View over the whole JPEG buffer.
 * @param {number} start - Byte offset of the APP1 payload ("Exif\0\0").
 * @param {number} length - Payload length (currently unused; kept for callers).
 * @returns {Object|null} Merged tag object, or null when the payload is not
 *   valid EXIF or yields no tags.
 */
function parseExifData(view, start, length) {
  /* APP1 payload must begin with the "Exif" identifier (first 4 of "Exif\0\0"). */
  const exifStr = String.fromCharCode(
    view.getUint8(start), view.getUint8(start + 1),
    view.getUint8(start + 2), view.getUint8(start + 3)
  );
  if (exifStr !== 'Exif') return null;
  /* TIFF header follows the 6-byte identifier. */
  const tiffStart = start + 6;
  const byteOrder = view.getUint16(tiffStart);
  const littleEndian = byteOrder === 0x4949; /* 'II' = Intel = little-endian */
  /* Verify TIFF magic number */
  if (view.getUint16(tiffStart + 2, littleEndian) !== 0x002A) return null;
  const ifd0Offset = view.getUint32(tiffStart + 4, littleEndian);
  const result = {};
  /* Parse IFD0 */
  const ifd0 = parseIFD(view, tiffStart, tiffStart + ifd0Offset, littleEndian, TAGS);
  Object.assign(result, ifd0);
  /* Parse EXIF sub-IFD if present */
  if (ifd0.exifOffset) {
    const exifIfd = parseIFD(view, tiffStart, tiffStart + ifd0.exifOffset, littleEndian, TAGS);
    Object.assign(result, exifIfd);
  }
  /* Parse GPS IFD if present — convert DMS rationals to decimal degrees. */
  if (ifd0.gpsOffset) {
    const gpsIfd = parseIFD(view, tiffStart, tiffStart + ifd0.gpsOffset, littleEndian, GPS_TAGS);
    if (gpsIfd.lat && gpsIfd.latRef) {
      result.latitude = convertDMSToDD(gpsIfd.lat, gpsIfd.latRef);
    }
    if (gpsIfd.lon && gpsIfd.lonRef) {
      result.longitude = convertDMSToDD(gpsIfd.lon, gpsIfd.lonRef);
    }
    if (gpsIfd.alt != null) {
      result.altitude = gpsIfd.altRef === 1 ? -gpsIfd.alt : gpsIfd.alt;
    }
  }
  /* Strip internal pointer tags once here (the previous version also deleted
     them redundantly inside the sub-IFD branches above). */
  delete result.exifOffset;
  delete result.gpsOffset;
  return Object.keys(result).length ? result : null;
}
/**
 * Walk a single IFD and decode every entry whose tag ID appears in tagMap.
 * Unknown tags are skipped; a truncated/malformed IFD yields whatever was
 * parsed before the read failed.
 *
 * @param {DataView} view - View over the whole JPEG buffer.
 * @param {number} tiffStart - Offset of the TIFF header (base for pointers).
 * @param {number} ifdStart - Offset of this IFD's entry-count word.
 * @param {boolean} littleEndian - Byte order from the TIFF header.
 * @param {Object} tagMap - tag ID → property name lookup.
 * @returns {Object} Decoded tag values keyed by friendly name.
 */
function parseIFD(view, tiffStart, ifdStart, littleEndian, tagMap) {
  const parsed = {};
  try {
    const entryCount = view.getUint16(ifdStart, littleEndian);
    /* Each IFD entry is a fixed 12-byte record. */
    let cursor = ifdStart + 2;
    for (let i = 0; i < entryCount; i += 1, cursor += 12) {
      const tagId = view.getUint16(cursor, littleEndian);
      const name = tagMap[tagId];
      if (!name) continue;
      const valueType = view.getUint16(cursor + 2, littleEndian);
      const valueCount = view.getUint32(cursor + 4, littleEndian);
      parsed[name] = readTagValue(view, tiffStart, valueType, valueCount, cursor + 8, littleEndian);
    }
  } catch {
    /* Gracefully handle malformed EXIF — return what we have so far. */
  }
  return parsed;
}
/**
 * Decode a single IFD entry value.
 *
 * Type: 1=BYTE, 2=ASCII, 3=SHORT, 4=LONG, 5=RATIONAL, 7=UNDEFINED,
 *       9=SLONG, 10=SRATIONAL
 *
 * Fix: type 9 (SLONG, signed 32-bit) was listed in the size table but had no
 * switch case, so it fell through to the default branch and was misread as a
 * single unsigned byte.
 *
 * @returns {string|number|Array|null} Decoded value; null for unsupported
 *   multi-value forms.
 */
function readTagValue(view, tiffStart, type, count, valueOffset, littleEndian) {
  const typeSize = { 1: 1, 2: 1, 3: 2, 4: 4, 5: 8, 7: 1, 9: 4, 10: 8 };
  const totalBytes = (typeSize[type] || 1) * count;
  /* Values wider than 4 bytes are stored out-of-line: the entry holds an
     offset (relative to the TIFF header) instead of the value itself. */
  const dataOffset = totalBytes > 4
    ? tiffStart + view.getUint32(valueOffset, littleEndian)
    : valueOffset;
  switch (type) {
    case 2: { /* ASCII string — count includes the trailing NUL, which we skip */
      let str = '';
      for (let i = 0; i < count - 1; i++) str += String.fromCharCode(view.getUint8(dataOffset + i));
      return str.trim();
    }
    case 3: /* SHORT */
      return count === 1 ? view.getUint16(dataOffset, littleEndian) : readArray(view, dataOffset, count, 2, littleEndian);
    case 4: /* LONG */
      return count === 1 ? view.getUint32(dataOffset, littleEndian) : readArray(view, dataOffset, count, 4, littleEndian);
    case 5: /* RATIONAL (unsigned) */
      if (count === 1) {
        const num = view.getUint32(dataOffset, littleEndian);
        const den = view.getUint32(dataOffset + 4, littleEndian);
        return den ? num / den : 0;
      }
      return readRationalArray(view, dataOffset, count, littleEndian);
    case 9: /* SLONG (signed 32-bit) — arrays unsupported, as before the fix */
      return count === 1 ? view.getInt32(dataOffset, littleEndian) : null;
    case 10: /* SRATIONAL (signed) */
      if (count === 1) {
        const num = view.getInt32(dataOffset, littleEndian);
        const den = view.getInt32(dataOffset + 4, littleEndian);
        return den ? num / den : 0;
      }
      return readRationalArray(view, dataOffset, count, littleEndian);
    default: /* BYTE / UNDEFINED / unknown types */
      return count === 1 ? view.getUint8(dataOffset) : null;
  }
}
/**
 * Read `count` consecutive unsigned integers of `size` bytes (2 or 4) from
 * the view and return them as a plain array.
 */
function readArray(view, offset, count, size, littleEndian) {
  const readOne = size === 2
    ? (pos) => view.getUint16(pos, littleEndian)
    : (pos) => view.getUint32(pos, littleEndian);
  const values = [];
  for (let i = 0; i < count; i++) {
    values.push(readOne(offset + i * size));
  }
  return values;
}
/**
 * Read `count` consecutive unsigned rationals (uint32 numerator/denominator
 * pairs, 8 bytes each) and return their quotients; a zero denominator maps
 * to 0 rather than Infinity/NaN.
 */
function readRationalArray(view, offset, count, littleEndian) {
  const quotients = [];
  for (let i = 0; i < count; i++) {
    const base = offset + i * 8;
    const numerator = view.getUint32(base, littleEndian);
    const denominator = view.getUint32(base + 4, littleEndian);
    quotients.push(denominator ? numerator / denominator : 0);
  }
  return quotients;
}
/**
 * Convert a GPS [degrees, minutes, seconds] triple to signed decimal degrees,
 * rounded to 6 decimal places. South/West references produce negative values.
 *
 * @param {number[]} dms - [degrees, minutes, seconds]
 * @param {string} ref - 'N' | 'S' | 'E' | 'W'
 * @returns {number|null} Decimal degrees, or null for malformed input.
 */
function convertDMSToDD(dms, ref) {
  if (!Array.isArray(dms) || dms.length < 3) return null;
  const [degrees, minutes, seconds] = dms;
  const sign = (ref === 'S' || ref === 'W') ? -1 : 1;
  const decimal = sign * (degrees + minutes / 60 + seconds / 3600);
  return Math.round(decimal * 1000000) / 1000000;
}
/* ═══════════════════════════════════════════════════════════════════════════
* EXIF preservation: extract raw APP1 segment and re-inject into JPEG
* ═══════════════════════════════════════════════════════════════════════════ */
/**
* Extract the raw EXIF APP1 segment (including FF E1 marker + length) from a
* JPEG ArrayBuffer. Returns a Uint8Array of the full segment, or null if no
* EXIF is found. This raw segment can be re-injected into a new JPEG to
* preserve metadata after canvas operations.
*/
/**
 * Extract the raw EXIF APP1 segment (marker + length field + payload) from a
 * JPEG ArrayBuffer as a standalone Uint8Array copy, or null when the buffer
 * is not a JPEG, has no APP1 segment, or the segment is truncated. The copy
 * can be re-injected later to preserve metadata after canvas operations.
 *
 * @param {ArrayBuffer} arrayBuffer - Raw JPEG bytes.
 * @returns {Uint8Array|null}
 */
export function extractExifSegment(arrayBuffer) {
  const view = new DataView(arrayBuffer);
  if (view.byteLength < 4) return null;
  /* Must start with the JPEG SOI marker. */
  if (view.getUint16(0) !== 0xFFD8) return null;
  let cursor = 2;
  while (cursor < view.byteLength - 4) {
    const marker = view.getUint16(cursor);
    if (marker === 0xFFE1) {
      /* APP1 found. Its length field counts itself plus the payload. */
      const payloadLen = view.getUint16(cursor + 2);
      const segmentBytes = 2 + payloadLen; /* + 2 for the marker itself */
      if (cursor + segmentBytes > view.byteLength) return null; /* truncated */
      /* slice() makes an independent copy that outlives the source buffer. */
      return new Uint8Array(arrayBuffer.slice(cursor, cursor + segmentBytes));
    }
    /* Every valid marker starts with 0xFF; anything else means lost sync. */
    if ((marker & 0xFF00) !== 0xFF00) break;
    cursor += 2 + view.getUint16(cursor + 2);
  }
  return null;
}
/**
* Insert a raw APP1 EXIF segment into a JPEG Blob that lacks one.
* Places the APP1 segment immediately after the SOI marker (FF D8).
* Returns a new Blob with EXIF restored, or the original blob if it's not JPEG
* or if exifSegment is null.
*/
/**
 * Splice a raw APP1 EXIF segment into a JPEG Blob, immediately after the SOI
 * marker (FF D8). Returns a new Blob with the EXIF restored, or the original
 * blob unchanged when it is not a JPEG or exifSegment is null.
 *
 * @param {Blob} jpegBlob - Output JPEG (typically from canvas, EXIF-less).
 * @param {Uint8Array|null} exifSegment - Raw APP1 bytes from extractExifSegment.
 * @returns {Promise<Blob>}
 */
export async function insertExifIntoJpeg(jpegBlob, exifSegment) {
  if (!exifSegment || !jpegBlob || jpegBlob.type !== 'image/jpeg') return jpegBlob;
  const buffer = await jpegBlob.arrayBuffer();
  if (new DataView(buffer).getUint16(0) !== 0xFFD8) return jpegBlob;
  /* Layout: SOI (2 bytes) | injected APP1 | remainder of the original stream. */
  const original = new Uint8Array(buffer);
  const merged = new Uint8Array(original.length + exifSegment.length);
  merged.set(original.subarray(0, 2), 0);
  merged.set(exifSegment, 2);
  merged.set(original.subarray(2), 2 + exifSegment.length);
  return new Blob([merged], { type: 'image/jpeg' });
}
+26 -5
View File
@@ -4,8 +4,14 @@
* during large-image processing. On browsers that lack OffscreenCanvas support
* (or when running inside a Worker is not possible) the module falls back to
* main-thread canvas operations.
*
* EXIF preservation: Canvas operations strip all metadata. After resize/compress
* we re-inject the original EXIF APP1 segment into the output JPEG so that
* camera info, GPS, date-taken etc. survive the optimization.
*/
import { extractExifSegment, insertExifIntoJpeg } from './exif.js';
let worker = null;
let workerSupported = null;
@@ -35,6 +41,8 @@ function getWorker() {
/*
* Public entry point. Validates the file against the image rules, then delegates
* the actual resize/compress work to the worker or the main-thread fallback.
* After optimization, EXIF metadata from the original file is re-injected into
* the output JPEG so that camera/GPS/date info is preserved.
*/
export async function optimizeImage(file, imageRules) {
if (imageRules?.allowedMimeTypes?.length && !imageRules.allowedMimeTypes.includes(file.type)) {
@@ -45,13 +53,26 @@ export async function optimizeImage(file, imageRules) {
throw new Error(`File exceeds limit: ${file.name}`);
}
const w = getWorker();
if (w) {
return optimizeInWorker(w, file, imageRules);
/* Extract raw EXIF segment from original JPEG before canvas strips it */
let exifSegment = null;
if (file.type === 'image/jpeg') {
try {
const originalBuffer = await file.arrayBuffer();
exifSegment = extractExifSegment(originalBuffer);
} catch { /* non-critical — proceed without EXIF */ }
}
return optimizeOnMainThread(file, imageRules);
const w = getWorker();
const result = w
? await optimizeInWorker(w, file, imageRules)
: await optimizeOnMainThread(file, imageRules);
/* Re-inject EXIF into the optimized JPEG */
if (exifSegment && result.blob.type === 'image/jpeg') {
result.blob = await insertExifIntoJpeg(result.blob, exifSegment);
}
return result;
}
/* ── Worker path ────────────────────────────────────────────────────────── */
+173
View File
@@ -0,0 +1,173 @@
/*
* user-db.js — IndexedDB storage for the user portal.
*
* Stores task data (including heavy base64 image dataUrls) in IndexedDB
* instead of localStorage to avoid the ~5 MB browser quota.
*
* IndexedDB provides hundreds of MB of storage (browser-managed, quota-based)
* which makes it suitable for image-heavy task data.
*
* Usage:
* import { openUserDB, loadTaskData, saveTaskData, getStorageEstimate } from './user-db.js';
*
* await openUserDB(); // Call once on init
* const data = await loadTaskData(); // Returns the full taskData object
* await saveTaskData(data); // Persist updated taskData
* const est = await getStorageEstimate(); // Get usage info
*/
/* ── Constants ──────────────────────────────────────────────────────────── */
const DB_NAME = 'user-portal-db'; /* IndexedDB database name */
const DB_VERSION = 1; /* bump to trigger onupgradeneeded migrations */
const STORE_TASK_DATA = 'taskData'; /* object store: one record per taskId */
/* ── Module-level DB reference ──────────────────────────────────────────── */
let db = null; /* set by openUserDB(); all other calls read this handle */
/* ── Open / create database ─────────────────────────────────────────────── */
/**
* Opens the IndexedDB database. Must be called once before any read/write.
* Creates the object store on first run or version upgrade.
*/
/**
 * Opens (and on first run or version bump, creates/upgrades) the IndexedDB
 * database and caches the handle in the module-level `db` reference.
 * Must resolve before any other read/write in this module.
 *
 * @returns {Promise<IDBDatabase>} the opened database handle
 */
export function openUserDB() {
  return new Promise((resolve, reject) => {
    const request = indexedDB.open(DB_NAME, DB_VERSION);
    request.onupgradeneeded = () => {
      /* First run or version upgrade: make sure the task-data store exists.
         Keyed by taskId so each task is an independent record. */
      const database = request.result;
      if (!database.objectStoreNames.contains(STORE_TASK_DATA)) {
        database.createObjectStore(STORE_TASK_DATA, { keyPath: 'taskId' });
      }
    };
    request.onsuccess = () => {
      db = request.result;
      resolve(db);
    };
    request.onerror = () => reject(request.error);
  });
}
/* ── Read all task data ─────────────────────────────────────────────────── */
/**
* Loads all task data from IndexedDB and returns it as a plain object
* keyed by taskId (same shape as the old localStorage structure).
*
* @returns {Promise<Object>} e.g. { "task-123": { visitDate: "", records: {...} }, ... }
*/
/**
 * Loads every task record from IndexedDB and returns a single plain object
 * keyed by taskId (the same shape the old localStorage structure used).
 * Resolves to {} when the database has not been opened yet.
 *
 * @returns {Promise<Object>} e.g. { "task-123": { visitDate: "", records: {...} }, ... }
 */
export function loadTaskData() {
  return new Promise((resolve, reject) => {
    if (!db) { resolve({}); return; }
    const store = db.transaction(STORE_TASK_DATA, 'readonly').objectStore(STORE_TASK_DATA);
    const request = store.getAll();
    request.onerror = () => reject(request.error);
    request.onsuccess = () => {
      /* Re-key: pull the taskId field out of each record and use it as the key. */
      const byTaskId = {};
      for (const { taskId, ...fields } of request.result) {
        byTaskId[taskId] = fields;
      }
      resolve(byTaskId);
    };
  });
}
/* ── Save all task data ─────────────────────────────────────────────────── */
/**
* Persists the full taskData object into IndexedDB.
* Each taskId becomes a separate record in the store for efficient access.
*
* @param {Object} taskData - Object keyed by taskId
* @returns {Promise<void>}
*/
/**
 * Persists the full taskData object into IndexedDB, one record per taskId.
 *
 * @param {Object} taskData - Object keyed by taskId
 * @returns {Promise<void>} rejects when the database is not open or the
 *   transaction fails/aborts
 */
export function saveTaskData(taskData) {
  return new Promise((resolve, reject) => {
    if (!db) { reject(new Error('Database not open')); return; }
    const tx = db.transaction(STORE_TASK_DATA, 'readwrite');
    const store = tx.objectStore(STORE_TASK_DATA);
    for (const [taskId, data] of Object.entries(taskData)) {
      store.put({ taskId, ...data });
    }
    tx.oncomplete = () => resolve();
    tx.onerror = () => reject(tx.error);
    /* Fix: an aborted transaction (e.g. quota exceeded) fires "abort", not
       "error" — without this handler the promise would hang forever. */
    tx.onabort = () => reject(tx.error || new Error('Transaction aborted'));
  });
}
/* ── Save single task entry ─────────────────────────────────────────────── */
/**
* Saves or updates a single task's data. More efficient than saving everything
* when only one task changed.
*
* @param {string} taskId
* @param {Object} data - The task data (visitDate, records, etc.)
* @returns {Promise<void>}
*/
/**
 * Saves or updates a single task's data. More efficient than saveTaskData
 * when only one task changed.
 *
 * @param {string} taskId
 * @param {Object} data - The task data (visitDate, records, etc.)
 * @returns {Promise<void>} rejects when the database is not open or the
 *   transaction fails/aborts
 */
export function saveOneTaskData(taskId, data) {
  return new Promise((resolve, reject) => {
    if (!db) { reject(new Error('Database not open')); return; }
    const tx = db.transaction(STORE_TASK_DATA, 'readwrite');
    const store = tx.objectStore(STORE_TASK_DATA);
    store.put({ taskId, ...data });
    tx.oncomplete = () => resolve();
    tx.onerror = () => reject(tx.error);
    /* Fix: aborted transactions (e.g. quota exceeded) fire "abort", not
       "error" — handle it so the promise always settles. */
    tx.onabort = () => reject(tx.error || new Error('Transaction aborted'));
  });
}
/* ── Delete a task entry ────────────────────────────────────────────────── */
/**
* Removes a single task's data from IndexedDB.
*
* @param {string} taskId
* @returns {Promise<void>}
*/
/**
 * Removes a single task's data from IndexedDB.
 *
 * @param {string} taskId
 * @returns {Promise<void>} rejects when the database is not open or the
 *   transaction fails/aborts
 */
export function deleteTaskData(taskId) {
  return new Promise((resolve, reject) => {
    if (!db) { reject(new Error('Database not open')); return; }
    const tx = db.transaction(STORE_TASK_DATA, 'readwrite');
    const store = tx.objectStore(STORE_TASK_DATA);
    store.delete(taskId);
    tx.oncomplete = () => resolve();
    tx.onerror = () => reject(tx.error);
    /* Fix: aborted transactions fire "abort", not "error" — handle it so the
       promise always settles instead of hanging. */
    tx.onabort = () => reject(tx.error || new Error('Transaction aborted'));
  });
}
/* ── Storage estimate ───────────────────────────────────────────────────── */
/**
* Returns an estimate of IndexedDB usage (if the StorageManager API is available).
* Falls back to counting serialized task data size.
*
* @param {Object} taskData - Current in-memory taskData for fallback sizing
* @returns {Promise<{usedMB: string, quotaMB: string, pct: number}>}
*/
/**
 * Returns an estimate of storage usage. Prefers the StorageManager API
 * (available in secure contexts); falls back to sizing the serialized
 * in-memory task data when the API is absent.
 *
 * @param {Object} taskData - Current in-memory taskData for fallback sizing
 * @returns {Promise<{usedMB: string, quotaMB: string, pct: number}>}
 */
export async function getStorageEstimate(taskData) {
  /* Fix: reference navigator via globalThis — a bare `navigator` identifier
     throws ReferenceError in runtimes that do not declare it (e.g. tests). */
  const storage = globalThis.navigator?.storage;
  if (storage && storage.estimate) {
    const est = await storage.estimate();
    const usedMB = ((est.usage || 0) / (1024 * 1024)).toFixed(2);
    const quotaMB = ((est.quota || 0) / (1024 * 1024)).toFixed(0);
    const pct = est.quota ? Math.min(100, ((est.usage / est.quota) * 100)) : 0;
    return { usedMB, quotaMB, pct: Math.round(pct) };
  }
  /* Fallback: estimate from serialized data (~2 bytes per UTF-16 code unit). */
  const json = JSON.stringify(taskData || {});
  const bytes = json.length * 2;
  const usedMB = (bytes / (1024 * 1024)).toFixed(2);
  return { usedMB, quotaMB: '∞', pct: 0 };
}
+1588
View File
File diff suppressed because it is too large.