<!DOCTYPE html>
<!-- NOTE: stray file-listing text that previously preceded the doctype was removed;
     any content before <!DOCTYPE html> forces browsers into quirks mode. -->
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>AI Night Vision Camera</title>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-webgpu"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/mobilenet"></script>
<script src="https://cdn.jsdelivr.net/npm/yolo-tfjs-vision@latest/dist/yolo.umd.js"></script>
<style>
/* Full-viewport dark "terminal" look for the camera page. */
body {
margin: 0;
padding: 0;
background: #111;
color: #ccc;
font-family: 'Courier New', Courier, monospace;
overflow: hidden;
display: flex;
justify-content: center;
align-items: center;
height: 100vh;
}
/* Positioning context for the absolutely-placed video/canvas/UI layers. */
.container {
display: flex;
flex-direction: column;
align-items: center;
width: 100%;
height: 100%;
position: relative;
}
.video-container {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
overflow: hidden;
background: #000;
}
/* Mirrored ("selfie") presentation; the JS flips box X coordinates to match. */
#video {
width: 100%;
height: 100%;
object-fit: cover;
transform: scaleX(-1);
}
/* Detection-overlay canvas stacked directly on top of the video. */
#canvas {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
}
.controls {
position: absolute;
bottom: 60px;
left: 50%;
transform: translateX(-50%);
z-index: 10;
display: flex;
gap: 15px;
transition: opacity 0.3s ease; /* Smooth hide/show */
}
/* Hidden state for the control bar (toggled by "Hide UI"). */
.controls.hidden {
opacity: 0;
pointer-events: none; /* Prevent interaction when hidden */
}
/* Hidden state for the status panel. */
.detection-info.hidden {
opacity: 0;
pointer-events: none; /* Prevent interaction when hidden */
}
button {
background: rgba(0, 70, 150, 0.7); /* Brand blue */
color: #ccc;
border: 1px solid #0078D4; /* Brand blue */
padding: 8px 15px;
border-radius: 2px;
cursor: pointer;
font-weight: bold;
font-family: 'Courier New', Courier, monospace;
transition: all 0.2s;
font-size: 12px;
}
button:hover {
background: rgba(0, 120, 212, 0.9); /* Brand blue */
box-shadow: 0 0 8px #0078D4; /* Brand blue */
}
/* Bottom status strip: FPS counter, entity log, scan messages. */
.detection-info {
position: absolute;
bottom: 10px;
left: 10px;
right: 10px;
z-index: 10;
padding: 8px;
background: rgba(50, 50, 50, 0.7);
border: 1px solid #888;
border-radius: 2px;
font-size: 12px;
color: #0078D4; /* Brand blue */
transition: opacity 0.3s ease; /* Smooth hide/show */
}
.stats {
display: flex;
justify-content: space-between;
margin-top: 10px;
font-size: 14px;
color: #0078D4; /* Brand blue */
}
/* CSS-only night-vision approximation applied to the <video> element. */
.night-vision {
filter: brightness(1.5) contrast(1.8) grayscale(0.8) hue-rotate(90deg) sepia(0.2);
}
/* Simple scan-line glitch effect styling (elements are commented out in markup). */
.glitch-line {
position: absolute;
left: 0;
width: 100%;
height: 1px; /* Thin lines */
background-color: rgba(0, 120, 212, 0.2); /* Faint blue */
z-index: 5; /* Below controls, above video potentially */
animation: glitch-flicker 0.1s infinite alternate;
display: none; /* Hidden by default */
}
@keyframes glitch-flicker {
0% { opacity: 0.1; }
100% { opacity: 0.3; }
}
</style>
</head>
<body>
<div class="container">
<!-- Live camera feed (mirrored via CSS) with the detection overlay canvas on top. -->
<div class="video-container">
<video id="video" autoplay playsinline></video>
<canvas id="canvas"></canvas>
<!-- Glitch effect elements (optional, can be done purely in JS canvas too) -->
<!-- <div class="glitch-line" style="top: 20%;"></div> -->
<!-- <div class="glitch-line" style="top: 60%;"></div> -->
</div>
<!-- Control bar; onclick handlers are exposed on window by the module script below. -->
<div class="controls">
<select id="resolutionSelector" style="background: rgba(50, 50, 50, 0.7); color: #ccc; border: 1px solid #888; padding: 8px 10px; border-radius: 2px; font-family: 'Courier New', Courier, monospace; font-size: 12px;">
<option value="640x480">480p</option>
<option value="1280x720">720p</option>
<option value="1920x1080">1080p</option>
</select>
<!-- Values are keys into the modelConfigs object in the script. -->
<select id="modelSelector" style="background: rgba(50, 50, 50, 0.7); color: #ccc; border: 1px solid #888; padding: 8px 10px; border-radius: 2px; font-family: 'Courier New', Courier, monospace; font-size: 12px;">
<option value="cocoSsd">Detection Protocol: COCO-SSD</option>
<option value="mobileNet">Analysis Protocol: MobileNet</option>
<option value="yoloTfjsVision">Detection Protocol: YOLO-Vision</option>
</select>
<button onclick="toggleNightVision()">Toggle Night Vision</button>
<button onclick="toggleDetection()">Toggle Detection</button>
<button id="soundToggle" onclick="toggleSound()">Sound FX: OFF</button>
<button onclick="toggleHideUI()">Hide UI (H)</button>
</div>
<!-- Status panel updated by the draw functions (entity log, FPS, object count). -->
<div class="detection-info">
<div id="detections">SYSTEM OFFLINE</div>
<div class="stats">
<span id="fps">FPS: 0</span>
<span id="objects">LOGGED ENTITIES: 0</span>
</div>
</div>
</div>
<!-- External MP3 sound source, played when a person is detected (if sound enabled). -->
<audio id="detectionSound" src="https://codeskulptor-demos.commondatastorage.googleapis.com/pang/pop.mp3" preload="auto"></audio>
<script type="module">
// Application state for the camera + detection demo.
// (A Transformers.js/DETR pipeline import was removed; its model type string
// still appears in detect() as a dead branch.)
let video = document.getElementById('video');
let canvas = document.getElementById('canvas');
let ctx = canvas.getContext('2d');
let model; // Currently loaded model instance (TFJS model object)
let currentModelName = 'cocoSsd'; // Key into modelConfigs for the active model
let predictFunction; // Per-model inference entry point (null for YOLO, which uses a callback)
let drawFunction; // Per-model canvas drawing routine
let isNightVision = false;
let isDetecting = false;
let lastTime = performance.now(); // Start of the current FPS measurement window
let frameCount = 0; // Frames counted in the current FPS window
let imageProcessor; // ImageCapture instance used for hardware brightness/contrast tweaks
const modelSelector = document.getElementById('modelSelector');
let isSoundEnabled = false;
let detectionSoundElement; // The <audio id="detectionSound"> element
let soundInitialized = false; // Set once a user interaction has enabled sound playback
let yoloVisionInstance; // yolo-tfjs-vision wrapper, only set while that model is active
const controlsElement = document.querySelector('.controls'); // Control bar (hide/show UI)
const detectionInfoElement = document.querySelector('.detection-info'); // Status panel (hide/show UI)
let detectionLoopStarted = false; // Guard so the requestAnimationFrame loop is started only once
let inferenceSkipCounter = 0; // Throttle: run inference every other frame at high resolutions
let yoloDebugLogged = false; // One-shot flag for logging the first YOLO callback payload
let isSwitchingResolution = false; // True while the camera stream is being renegotiated
let resumeDetectionAfterResolution = false; // Restore detection state after a resolution switch
let lastDimensionWarningAt = 0; // Timestamp throttle for "frame not ready" warnings
// Ominous labels mapping (primarily for COCO-SSD)
const scaryLabels = {
person: 'SUBJECT',
car: 'ASSET [MOTORIZED]',
truck: 'ASSET [HEAVY DUTY]',
bus: 'ASSET [MASS TRANSIT]',
motorcycle: 'ASSET [MOBILE]',
bicycle: 'ASSET [MOBILE]',
traffic_light: 'CONTROL SIGNAL',
stop_sign: 'CONTROL SIGNAGE',
cat: 'BIO-SIGNATURE [FELINE]',
dog: 'BIO-SIGNATURE [CANINE]',
bird: 'BIO-SIGNATURE [AVIAN]',
// Add more mappings as needed
default: 'UNKNOWN ENTITY' // Added a default
};
function getScaryLabel(className) {
// Use specific scary label or generate a generic one
return scaryLabels[className] || `ENTITY [${className ? className.toUpperCase() : 'UNKNOWN'}]`;
}
// Paint a label bar anchored to a detection box. The bar is clamped so it
// never leaves the canvas, and the text is clipped to the bar's bounds.
function drawDetectionLabel(ctx, text, boxX, boxY, boxW, boxH, canvasWidth, canvasHeight) {
  const fontSize = 12;
  const padX = 6;
  const padY = 4;
  const barHeight = fontSize + padY * 2;
  ctx.font = `${fontSize}px "Courier New", Courier, monospace`;
  ctx.textAlign = 'left';
  ctx.textBaseline = 'middle';
  const textWidth = ctx.measureText(text).width;
  // Bar is at least as wide as the box and at least as wide as the padded
  // text, but never wider than the canvas itself.
  const barWidth = Math.max(
    Math.min(textWidth + padX * 2, canvasWidth),
    Math.min(boxW, canvasWidth)
  );
  if (barWidth <= 0) return;
  // Slide the bar left when it would overflow the right edge.
  let barX = boxX;
  if (barX + barWidth > canvasWidth) {
    barX = Math.max(0, canvasWidth - barWidth);
  }
  // Prefer sitting above the box; drop below it when there is no headroom,
  // then clamp vertically into the canvas.
  let barY = boxY - barHeight;
  if (barY < 0) {
    barY = Math.min(boxY + boxH, canvasHeight - barHeight);
  }
  barY = Math.max(0, Math.min(barY, canvasHeight - barHeight));
  ctx.fillStyle = 'rgba(0, 70, 150, 0.9)';
  ctx.fillRect(barX, barY, barWidth, barHeight);
  ctx.fillStyle = '#d9e7ff';
  ctx.save();
  ctx.beginPath();
  ctx.rect(barX, barY, barWidth, barHeight);
  ctx.clip();
  ctx.fillText(text, barX + padX, barY + barHeight / 2);
  ctx.restore();
}
// --- Drawing Functions ---
// Draw results for COCO-SSD (Object Detection)
// Draw COCO-SSD object-detection results onto the overlay canvas and mirror
// them into the status panel.
// predictions: [{ class, score, bbox: [x, y, w, h] }] in native video pixels.
// vW/vH: canvas buffer size; canvasScaleX/Y map video pixels -> buffer pixels
// (both are 1 when the buffer matches the native video size).
// cW/cH/vScale/sW/sH/videoOffsetX/videoOffsetY are accepted for signature
// parity with the other draw functions but are unused here.
function drawCocoSsd(predictions, ctx, vW, vH, cW, cH, vScale, sW, sH, videoOffsetX, videoOffsetY, canvasScaleX, canvasScaleY) {
ctx.clearRect(0, 0, vW, vH); // Clear canvas using buffer dimensions
let subjectDetectedThisFrame = false;
predictions.forEach(prediction => {
const scaryLabel = getScaryLabel(prediction.class);
const score = Math.round(prediction.score * 100);
const [detX, detY, detW, detH] = prediction.bbox; // Detections in native video coords
// Scale detections into canvas-buffer coordinates (no-op when scale is 1).
const drawX = detX * canvasScaleX;
const drawY = detY * canvasScaleY;
const drawW = detW * canvasScaleX;
const drawH = detH * canvasScaleY;
// The video is displayed mirrored (CSS scaleX(-1)), so mirror the box X too.
const flippedDrawX = vW - drawX - drawW;
// Draw bounding box (Blue) - Use flipped X coordinate
ctx.strokeStyle = '#0078D4'; // Updated blue
ctx.lineWidth = 1;
ctx.strokeRect(flippedDrawX, drawY, drawW, drawH);
drawDetectionLabel(
ctx,
`${scaryLabel} ${score}%`,
flippedDrawX,
drawY,
drawW,
drawH,
vW,
vH
);
// Check if a subject (person) is detected for sound trigger
if (prediction.class === 'person') {
subjectDetectedThisFrame = true;
}
});
// Play sound if enabled and a subject was detected
if (isSoundEnabled && subjectDetectedThisFrame && detectionSoundElement && soundInitialized) {
// Add slight random delay to sound playback for effect
setTimeout(() => {
if (isSoundEnabled && subjectDetectedThisFrame && detectionSoundElement && soundInitialized) { // Re-check: sound may have been toggled off during the delay
detectionSoundElement.play().catch(e => {
// Log specific errors, ignore AbortError if sound toggled off quickly
if (e.name !== 'AbortError') {
console.warn(`Audio playback failed during detection: ${e.name} - ${e.message}`);
}
}); // Play static click
}
}, Math.random() * 50); // 0-50ms delay
}
// Mirror detections into the status panel (entity count + per-entity lines).
document.getElementById('objects').textContent =
`LOGGED ENTITIES: ${predictions.length}`;
document.getElementById('detections').innerHTML =
predictions.map(p =>
`TRACKING ${getScaryLabel(p.class)} (${Math.round(p.score * 100)}% Conf.)`
).join('<br>') || 'SYSTEM SCANNING: NO ENTITIES DETECTED';
}
// Draw results for MobileNet (Classification)
function drawMobileNet(predictions, ctx, vW, vH, cW, cH) {
ctx.clearRect(0, 0, vW, vH); // Clear canvas
if (predictions && predictions.length > 0) {
const topPrediction = predictions[0];
const classificationLabel = `PRIMARY SIGNATURE: ${topPrediction.className.toUpperCase()}`;
const confidenceScore = `CONFIDENCE LEVEL: ${Math.round(topPrediction.probability * 100)}%`;
const statusText = `SYSTEM ANALYSIS: ${classificationLabel}`;
// Draw classification label background
ctx.fillStyle = 'rgba(0, 100, 0, 0.8)'; // Greenish background
ctx.fillRect(5, vH - 55, vW - 10, 50); // Position at bottom
// Draw classification text
ctx.fillStyle = '#cccccc';
ctx.font = '16px "Courier New", Courier, monospace';
ctx.fillText(classificationLabel, 15, vH - 35);
ctx.fillText(confidenceScore, 15, vH - 15);
// Update detection info specific to MobileNet (more alarming)
document.getElementById('objects').textContent = 'SYSTEM STATUS: ANALYZING SIGNATURES';
document.getElementById('detections').innerHTML = statusText;
} else {
document.getElementById('objects').textContent = 'SYSTEM STATUS: AWAITING SIGNATURE DATA';
document.getElementById('detections').innerHTML = 'No classification data available.';
}
}
// Draw results for YOLO-TFJS-VISION
function drawYoloTfjsVision(detections, ctx, vW, vH, cW, cH, vScale, sW, sH, videoOffsetX, videoOffsetY, canvasScaleX, canvasScaleY) {
ctx.clearRect(0, 0, vW, vH); // Clear canvas using buffer dimensions
let subjectDetectedThisFrame = false;
let validDetectionsDrawn = 0;
let invalidDetectionsSkipped = 0;
const detectionLines = [];
// detections object: { boxes: [y1, x1, y2, x2, ...], scores: [...], classes: [...], labels: [...] }
const { boxes, scores, labels } = detections;
// Ensure detections are valid before proceeding
if (!boxes || !scores || !labels || !video.videoWidth || !video.videoHeight) {
console.warn("Invalid detections object or video dimensions in drawYoloTfjsVision:", detections, video.videoWidth, video.videoHeight);
document.getElementById('detections').innerHTML = 'SYSTEM SCANNING [YOLO-V]: WAITING FOR VALID DATA...';
return;
}
for (let i = 0; i < scores.length; ++i) {
// Check if data for this index is valid
if (boxes.length < (i * 4 + 4) || scores.length <= i || labels.length <= i) {
console.warn(`Inconsistent data for detection index ${i}`, detections);
continue; // Skip this detection
}
const score = Math.round(scores[i] * 100);
const label = labels[i] ? labels[i].toUpperCase() : 'UNKNOWN ENTITY'; // Use label directly
const boxIndex = i * 4;
const [detY1, detX1, detY2, detX2] = boxes.slice(boxIndex, boxIndex + 4);
// yolo-tfjs-vision returns pixel-space coordinates [y1, x1, y2, x2].
if ([detY1, detX1, detY2, detX2].some(coord => Number.isNaN(coord) || !Number.isFinite(coord))) {
invalidDetectionsSkipped++;
continue; // Skip malformed boxes
}
// Clamp coordinates to the source frame and skip non-positive dimensions.
const clampedX1 = Math.min(Math.max(detX1, 0), video.videoWidth);
const clampedY1 = Math.min(Math.max(detY1, 0), video.videoHeight);
const clampedX2 = Math.min(Math.max(detX2, 0), video.videoWidth);
const clampedY2 = Math.min(Math.max(detY2, 0), video.videoHeight);
const detW = (clampedX2 - clampedX1);
const detH = (clampedY2 - clampedY1);
// Check for invalid dimensions
if (detW <= 0 || detH <= 0) {
invalidDetectionsSkipped++;
continue; // Skip drawing this box
}
// Scale detection coordinates to canvas buffer size
const drawX = clampedX1 * canvasScaleX;
const drawY = clampedY1 * canvasScaleY;
const drawW = detW * canvasScaleX;
const drawH = detH * canvasScaleY;
// Calculate the FLIPPED X coordinate for drawing
const flippedDrawX = vW - drawX - drawW;
// Draw bounding box (Microsoft Blue for this model too)
ctx.strokeStyle = '#0078D4'; // Updated blue
ctx.lineWidth = 1;
ctx.strokeRect(flippedDrawX, drawY, drawW, drawH);
drawDetectionLabel(
ctx,
`${label} ${score}% CONFIDENCE`,
flippedDrawX,
drawY,
drawW,
drawH,
vW,
vH
);
validDetectionsDrawn++;
detectionLines.push(`TRACKING ${label} (${score}% Conf.)`);
// Check if a 'person' is detected for sound trigger
if (labels[i] === 'person') {
subjectDetectedThisFrame = true;
}
}
// Play sound if enabled and a subject was detected
if (isSoundEnabled && subjectDetectedThisFrame && detectionSoundElement && soundInitialized) {
setTimeout(() => {
if (isSoundEnabled && subjectDetectedThisFrame && detectionSoundElement && soundInitialized) {
detectionSoundElement.play().catch(e => {
if (e.name !== 'AbortError') {
console.warn(`Audio playback failed during detection: ${e.name} - ${e.message}`);
}
});
}
}, Math.random() * 50);
}
// Update detection info specific to YOLO-TFJS-VISION
document.getElementById('objects').textContent =
`TRACKED ENTITIES [YOLO-V]: ${validDetectionsDrawn}`;
if (scores.length === 0) {
document.getElementById('detections').innerHTML = 'SYSTEM SCANNING [YOLO-V]: NO ENTITIES DETECTED';
} else if (validDetectionsDrawn === 0) {
document.getElementById('detections').innerHTML = `SYSTEM SCANNING [YOLO-V]: DETECTIONS INVALID (${invalidDetectionsSkipped} SKIPPED)`;
} else {
document.getElementById('detections').innerHTML = detectionLines.join('<br>');
}
}
// --- Model Configuration ---
const modelConfigs = {
cocoSsd: {
load: async () => await cocoSsd.load(),
predict: async (video) => await model.detect(video),
draw: drawCocoSsd,
label: 'Detection Protocol: COCO-SSD',
type: 'tfjs'
},
mobileNet: {
load: async () => await mobilenet.load(),
predict: async (video) => await model.classify(video),
draw: drawMobileNet,
label: 'Analysis Protocol: MobileNet',
type: 'tfjs'
},
yoloTfjsVision: {
modelUrl: 'yolo11n_web_model/model.json',
// labels: [ ... ],
load: async (config) => { // Accept config as argument
console.log(">>> yoloTfjsVision.load: Received config argument:", JSON.stringify(config)); // Log received config
if (!config || typeof config.modelUrl === 'undefined') {
console.error(">>> yoloTfjsVision.load: Config or modelUrl is missing! Received:", JSON.stringify(config)); // Log if missing
throw new Error("Configuration or modelUrl is missing in load function argument.");
}
if (typeof YOLO === 'undefined') {
throw new Error("YOLO class (yolo-tfjs-vision) not found. Check script import.");
}
yoloVisionInstance = new YOLO();
console.log("YOLO-TFJS-Vision instance created.");
document.getElementById('detections').innerHTML = `SYSTEM REBOOT: Configuring YOLO-Vision...`;
// yolo-tfjs-vision is built around TFJS WebGL backend.
try {
await tf.setBackend('webgl');
await tf.ready();
console.log('YOLO backend set to WebGL');
} catch (backendErr) {
console.warn('Could not switch YOLO to WebGL backend. Continuing with current backend.', backendErr);
}
await yoloVisionInstance.setup({
modelUrl: config.modelUrl, // Use config.modelUrl
scoreThreshold: 0.3,
// labels: config.labels // Use config.labels if defined
});
console.log("YOLO-TFJS-Vision configured with model URL:", config.modelUrl);
document.getElementById('detections').innerHTML = `SYSTEM REBOOT: Loading YOLO-Vision Model Weights...`;
const loadedTfjsModel = await yoloVisionInstance.loadModel();
console.log("YOLO-TFJS-Vision model loaded:", loadedTfjsModel);
return loadedTfjsModel;
},
predict: null, // Prediction handled by detect method's callback
draw: drawYoloTfjsVision, // Ensure draw function is assigned
label: 'Detection Protocol: YOLO-Vision',
type: 'yolo-tfjs-vision'
}
};
// Resize canvas to match video dimensions
function resizeCanvas() {
if (video.videoWidth > 0 && video.videoHeight > 0) {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
} else {
// Fallback or wait if dimensions aren't ready
canvas.width = 640; // Default width
canvas.height = 480; // Default height
}
console.log(`Canvas resized to: ${canvas.width}x${canvas.height}`);
// Force redraw if detection is active
if (isDetecting && model && predictFunction && drawFunction) {
// Request immediate redraw after resize if needed
// detect(); // Careful with immediate calls, might conflict with animation frame
}
}
// Handle model selection change
async function handleModelChange() {
const newModelName = modelSelector.value;
if (newModelName === currentModelName) return; // No change
console.log(`Changing model to: ${newModelName}`);
isDetecting = false; // Turn off detection during model switch
// Get config object first for safer access
const config = modelConfigs[newModelName];
if (!config) {
console.error(`Configuration not found for model: ${newModelName}`);
document.getElementById('detections').innerHTML = `ERROR: Unknown model selected: ${newModelName}.`;
return; // Stop if config doesn't exist
}
document.getElementById('detections').innerHTML = `SYSTEM REBOOT: Initializing ${config.label}...`; // Use config.label
document.getElementById('objects').textContent = 'SYSTEM STATUS: INITIALIZING MODULE';
ctx.clearRect(0, 0, canvas.width, canvas.height); // Clear canvas
try {
// Reset yolo instance if switching away from it
if (currentModelName === 'yoloTfjsVision') {
yoloVisionInstance = null;
}
if (newModelName === 'yoloTfjsVision') {
yoloDebugLogged = false;
}
// Add detailed logging before the call
console.log(">>> handleModelChange: Config object BEFORE calling load:", JSON.stringify(config));
console.log(">>> handleModelChange: typeof config.load BEFORE calling:", typeof config.load);
if (typeof config.load !== 'function') {
throw new Error("config.load is not a function!");
}
// Now use the pre-fetched config object
model = await config.load(config); // Pass config object as argument
predictFunction = config.predict;
drawFunction = config.draw;
currentModelName = newModelName;
console.log(`${config.label} initialized.`);
document.getElementById('detections').innerHTML = `SYSTEM READY: ${config.label}. Activate Detection.`;
document.getElementById('objects').textContent = 'SYSTEM STATUS: STANDBY';
// Reset FPS counter on model change
lastTime = performance.now();
frameCount = 0;
document.getElementById('fps').textContent = 'FPS: 0';
} catch (err) {
console.error(`Error loading model ${newModelName}:`, err);
// Use the safe config.label from above
document.getElementById('detections').innerHTML = `ERROR: Failed to load ${config.label}. Check console.`;
// Optionally revert to previous model or disable detection
model = null;
predictFunction = null;
drawFunction = null;
// Reset yolo instance if it was the one that failed
if (newModelName === 'yoloTfjsVision') yoloVisionInstance = null;
} finally {
// Re-enable detection toggle maybe? Or leave it off.
}
}
// Initialize camera and the default AI model
async function init() {
try {
await tf.setBackend('webgpu');
console.log('Using WebGPU backend');
} catch (e) {
console.warn('WebGPU not supported, falling back to WebGL', e);
await tf.setBackend('webgl');
}
detectionSoundElement = document.getElementById('detectionSound'); // Get audio element
// Try to mute/unmute to enable playback later - often needed due to browser restrictions
if (detectionSoundElement) {
detectionSoundElement.muted = true;
}
currentModelName = modelSelector.value || 'cocoSsd'; // Get initial model from selector
console.log(`Initializing with model: ${currentModelName}`);
// Get config object first for safer access
const initialConfig = modelConfigs[currentModelName];
if (!initialConfig) {
console.error(`FATAL: Initial configuration not found for model: ${currentModelName}`);
document.getElementById('detections').innerHTML = `**SYSTEM ERROR**: Unknown Initial Model (${currentModelName}). Refresh Required.`;
return; // Stop initialization if config is missing
}
document.getElementById('detections').innerHTML = `Initializing ${initialConfig.label}...`; // Use initialConfig.label
// Load the selected model
try {
// Add detailed logging before the call (init)
console.log(">>> init: Config object BEFORE calling load:", JSON.stringify(initialConfig));
console.log(">>> init: typeof initialConfig.load BEFORE calling:", typeof initialConfig.load);
if (typeof initialConfig.load !== 'function') {
throw new Error("initialConfig.load is not a function!");
}
// Use the pre-fetched initialConfig object
model = await initialConfig.load(initialConfig); // Pass config object as argument
console.log(`${initialConfig.label} loaded.`);
document.getElementById('detections').innerHTML = `SYSTEM READY: ${initialConfig.label}.`;
document.getElementById('objects').textContent = 'SYSTEM STATUS: STANDBY';
} catch (err) {
console.error(`FATAL: Initial model load failed (${currentModelName}):`, err);
// Use the safe initialConfig.label from above
document.getElementById('detections').innerHTML = `**SYSTEM ERROR**: Initial Analysis Module Load Failed (${initialConfig.label}). Code: ${err.name}. Refresh Required.`;
return; // Stop initialization if model fails
}
// Setup camera
const [initWidth, initHeight] = document.getElementById('resolutionSelector').value.split('x').map(Number);
const constraints = {
video: {
width: { ideal: initWidth },
height: { ideal: initHeight },
facingMode: 'environment'
}
};
try {
const stream = await navigator.mediaDevices.getUserMedia(constraints);
video.srcObject = stream;
} catch (err) {
console.error('FATAL: Could not get camera stream:', err);
document.getElementById('detections').innerHTML = `**SYSTEM ERROR**: Visual Sensor Feed Unavailable. (${err.message}). Access Denied or Hardware Error.`;
return; // Stop if camera fails
}
// Wait for video metadata to load to get correct dimensions
video.onloadedmetadata = () => {
console.log(`Video metadata loaded. Native size: ${video.videoWidth}x${video.videoHeight}`);
resizeCanvas(); // Set initial canvas size
// Initialize ImageCapture after stream is ready
try {
const track = video.srcObject.getVideoTracks()[0];
imageProcessor = new ImageCapture(track);
console.log('ImageCapture initialized.');
if (isNightVision) {
enhanceNightVision();
}
} catch (err) {
console.error('Error initializing ImageCapture:', err);
imageProcessor = null;
}
if (isSwitchingResolution) {
isSwitchingResolution = false;
if (resumeDetectionAfterResolution) {
isDetecting = true;
resumeDetectionAfterResolution = false;
console.log('Detection resumed after resolution switch.');
}
}
// Start detection loop only after everything is set up
if (!detectionLoopStarted) {
requestAnimationFrame(detect);
detectionLoopStarted = true;
}
};
// Handle window resize
window.addEventListener('resize', resizeCanvas);
// Handle model selection changes
modelSelector.addEventListener('change', handleModelChange);
// Handle resolution changes
document.getElementById('resolutionSelector').addEventListener('change', async () => {
const [w, h] = document.getElementById('resolutionSelector').value.split('x').map(Number);
console.log(`Switching resolution to: ${w}x${h}`);
isSwitchingResolution = true;
resumeDetectionAfterResolution = isDetecting;
isDetecting = false;
ctx.clearRect(0, 0, canvas.width, canvas.height);
document.getElementById('detections').innerHTML = `SYSTEM RECONFIG: Switching stream to ${w}x${h}...`;
document.getElementById('objects').textContent = 'SYSTEM STATUS: RECONFIGURING CAMERA';
if (video.srcObject) {
video.srcObject.getTracks().forEach(track => track.stop());
}
try {
const stream = await navigator.mediaDevices.getUserMedia({
video: { width: { ideal: w }, height: { ideal: h }, facingMode: 'environment' }
});
video.srcObject = stream;
} catch (err) {
console.error('Error changing resolution:', err);
isSwitchingResolution = false;
if (resumeDetectionAfterResolution) {
isDetecting = true;
resumeDetectionAfterResolution = false;
}
}
});
// Added: Handle keyboard input for controls toggle
window.addEventListener('keydown', handleKeyPress);
}
// Flip the CSS night-vision filter on the video element and, when an
// ImageCapture processor is available, apply/clear the hardware enhancements.
function toggleNightVision() {
  isNightVision = !isNightVision;
  video.className = isNightVision ? 'night-vision' : '';
  if (!imageProcessor) return;
  if (isNightVision) {
    enhanceNightVision();
  } else {
    resetEnhancements();
  }
}
// Toggle the detection loop on/off. Turning it off clears the overlay and the
// status panel; in both directions the FPS counter is reset.
function toggleDetection() {
  isDetecting = !isDetecting;
  console.log(`Detection toggled: ${isDetecting ? 'ACTIVE' : 'INACTIVE'}`);
  // Reset the FPS measurement window either way.
  lastTime = performance.now();
  frameCount = 0;
  if (isDetecting) return;
  // Detection was just disabled: wipe the overlay and report the idle state.
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  document.getElementById('objects').textContent = 'SYSTEM STATUS: DETECTION DISABLED';
  document.getElementById('detections').innerHTML = 'Detection Protocol Inactive.';
  document.getElementById('fps').textContent = 'FPS: 0';
}
// Main per-frame loop, driven by requestAnimationFrame. Gates on detection
// being enabled, a loaded model, and a valid video frame; throttles inference
// at high resolutions; dispatches to the per-model predict/draw path; and
// always reschedules itself.
async function detect() {
// Ensure model is loaded and detection is enabled
// TFJS models require predictFunction; YOLO uses its own detect callback path.
const modelType = modelConfigs[currentModelName]?.type;
const canRunDetection =
modelType === 'yolo-tfjs-vision'
? Boolean(isDetecting && model && yoloVisionInstance && drawFunction)
: Boolean(isDetecting && model && predictFunction && drawFunction);
if (canRunDetection) {
// Skip work entirely while the camera stream is being renegotiated.
if (isSwitchingResolution) {
requestAnimationFrame(detect);
return;
}
// Stream can temporarily report 0x0 while camera renegotiates.
// Skip inference/draw on invalid frames to avoid TFJS texture errors.
const hasValidVideoFrame =
video &&
video.readyState >= HTMLMediaElement.HAVE_CURRENT_DATA &&
video.videoWidth > 0 &&
video.videoHeight > 0;
if (!hasValidVideoFrame) {
// Warn at most every 2 seconds to avoid flooding the console.
const now = performance.now();
if (now - lastDimensionWarningAt > 2000) {
console.warn('Detection skipped: video frame not ready yet.', {
readyState: video?.readyState,
videoWidth: video?.videoWidth,
videoHeight: video?.videoHeight
});
lastDimensionWarningAt = now;
}
requestAnimationFrame(detect);
return;
}
// Calculate FPS over 1-second windows.
const now = performance.now();
frameCount++;
if (now - lastTime >= 1000) {
document.getElementById('fps').textContent = `FPS: ${frameCount}`;
frameCount = 0;
lastTime = now;
}
// Throttle: at >800px wide frames, run inference only every other frame.
const highRes = video.videoWidth > 800;
if (highRes) {
inferenceSkipCounter++;
if (inferenceSkipCounter % 2 !== 0) {
requestAnimationFrame(detect);
return; // Skip inference and drawing clear this frame to throttle
}
}
// --- Coordinate Transformation Calculations (Simplified for drawing) ---
const vW = canvas.width; // Canvas buffer width (should match video.videoWidth)
const vH = canvas.height; // Canvas buffer height (should match video.videoHeight)
const cW = canvas.clientWidth; // Displayed canvas width
const cH = canvas.clientHeight; // Displayed canvas height
// Check for valid dimensions (throttled warning, retry next frame)
if (vW === 0 || vH === 0 || cW === 0 || cH === 0) {
if (now - lastDimensionWarningAt > 2000) {
console.warn("Invalid dimensions for detection/drawing.", { vW, vH, cW, cH });
lastDimensionWarningAt = now;
}
requestAnimationFrame(detect); // Try again next frame
return;
}
// Scaling factors for drawing directly on the canvas buffer
// Assumes canvas buffer has same aspect ratio as native video
const canvasScaleX = vW / video.videoWidth; // Should be 1 if canvas.width == video.videoWidth
const canvasScaleY = vH / video.videoHeight;// Should be 1 if canvas.height == video.videoHeight
// Calculate scaling and offset for object-fit: cover (visual display only)
const vScale = Math.max(cW / vW, cH / vH);
const sW = vW * vScale; // Scaled video width in display
const sH = vH * vScale; // Scaled video height in display
const videoOffsetX = (cW - sW) / 2; // Visual offset X
const videoOffsetY = (cH - sH) / 2; // Visual offset Y
// Perform prediction using the current model's function
try {
// --- Prediction Step ---
let predictions;
// NOTE: shadows the outer modelType; same lookup, same value.
const modelType = modelConfigs[currentModelName].type;
if (modelType === 'tfjs') {
// TFJS models take the video element directly
predictions = await predictFunction(video);
} else if (modelType === 'transformersjs') {
// Dead branch: the Transformers.js pipeline was removed from this file.
console.error("Transformers.js model type encountered but model was removed.");
predictions = [];
} else if (modelType === 'yolo-tfjs-vision' && yoloVisionInstance && model && drawFunction) {
// Run YOLO inference sequentially; overlapping detect() calls can starve callbacks.
let callbackFired = false;
await yoloVisionInstance.detect(video, model, canvas, (detections) => {
callbackFired = true;
// One-shot debug dump of the first callback payload per session.
if (!yoloDebugLogged) {
console.log('YOLO callback sample:', {
boxesLength: detections?.boxes?.length ?? 0,
scoresLength: detections?.scores?.length ?? 0,
firstBox: detections?.boxes?.slice?.(0, 4) ?? []
});
yoloDebugLogged = true;
}
// Call the specific drawing function for this model type inside the callback.
drawFunction(detections, ctx, vW, vH, cW, cH, vScale, sW, sH, videoOffsetX, videoOffsetY, canvasScaleX, canvasScaleY);
if (isDetecting) drawGlitches(ctx, vW, vH);
});
if (!callbackFired) {
// Keep canvas untouched; library may have rendered internally.
document.getElementById('detections').innerHTML = 'SYSTEM SCANNING [YOLO-V]: CALLBACK NOT FIRED';
}
// Skip the TFJS drawing path below for YOLO.
requestAnimationFrame(detect);
return;
} else {
console.error("Unknown model type or configuration error for prediction:", modelType);
predictions = []; // Prevent further errors
}
// --- Drawing Step --- (Only for TFJS models now)
if (modelType === 'tfjs') {
drawFunction(predictions, ctx, vW, vH, cW, cH, vScale, sW, sH, videoOffsetX, videoOffsetY, canvasScaleX, canvasScaleY);
// --- Draw Visual Glitches --- (Only for TFJS models now)
if (isDetecting) { // Only draw glitches when detection is active
drawGlitches(ctx, vW, vH);
}
}
} catch (error) {
// Inference/draw failure: surface the error on canvas + panel and halt
// detection (the rAF loop itself keeps running).
console.error("Error during prediction or drawing:", error);
ctx.clearRect(0, 0, vW, vH);
ctx.fillStyle = '#0078D4'; // Updated blue for error message
ctx.font = '14px "Courier New", Courier, monospace';
ctx.fillText(`**CRITICAL ERROR**: ${error.message}`, 10, 20);
isDetecting = false; // Stop detection on error
document.getElementById('objects').textContent = 'SYSTEM STATUS: CRITICAL FAILURE';
document.getElementById('detections').innerHTML = `Error: ${error.message}. Detection Halted.`;
}
} else if (!isDetecting) {
// If detection is off but loop is running, do nothing computationally expensive
// Canvas clearing is handled in toggleDetection
} else if (!model) {
// Model not loaded, maybe show a message
ctx.fillStyle = 'orange';
ctx.font = '14px "Courier New", Courier, monospace';
ctx.fillText(`SYSTEM WARNING: Analysis Module Not Loaded.`, 10, 20);
}
requestAnimationFrame(detect); // Continue the loop
}
// Boot the application. Top-level await is legal here because this inline
// script is a module.
await init();
// Expose the handlers referenced by inline onclick="" attributes in the
// markup — module-scope functions are not global, so they must be attached
// to window explicitly.
Object.assign(window, {
    toggleNightVision,
    toggleDetection,
    toggleSound,
    toggleHideUI,
    handleModelChange, // also handy if called manually / from other contexts
});
// --- Image Enhancement Functions ---
// Moved ImageCapture initialization to init()
/**
 * Best-effort push of the camera toward a night-vision-friendly look:
 * brightness/contrast/sharpness to their maximums, saturation to its minimum.
 * Silently returns (with a log) when the ImageCapture processor is missing,
 * and absorbs capability/apply failures with a warning.
 * NOTE(review): assumes `getPhotoCapabilities()` exposes brightness /
 * contrast / saturation / sharpness ranges and that `setOptions()` exists —
 * both are non-standard in some browsers; the try/catch covers that case.
 */
async function enhanceNightVision() {
    if (!imageProcessor) {
        console.log('Image processor not available for enhancement.');
        return;
    }
    try {
        const caps = await imageProcessor.getPhotoCapabilities();
        const hasMax = (range) => range && range.max !== undefined;
        const hasMin = (range) => range && range.min !== undefined;
        const options = {};
        if (hasMax(caps.brightness)) {
            options.brightness = caps.brightness.max;
        }
        if (hasMax(caps.contrast)) {
            options.contrast = caps.contrast.max;
        }
        if (hasMin(caps.saturation)) {
            options.saturation = caps.saturation.min; // Desaturate
        }
        if (hasMax(caps.sharpness)) {
            options.sharpness = caps.sharpness.max;
        }
        // Manual exposure / white-balance tweaks intentionally left disabled:
        // options.exposureMode = 'manual';
        // options.exposureCompensation = 2;
        // options.whiteBalanceMode = 'manual';
        if (Object.keys(options).length === 0) {
            console.log('No specific night vision enhancements applicable/supported.');
            return;
        }
        console.log('Applying night vision enhancements:', options);
        await imageProcessor.setOptions(options);
    } catch (err) {
        console.warn('Could not get capabilities or set night vision enhancements:', err);
    }
}
// Function to reset enhancements when night vision is turned off
/**
 * Best-effort undo of enhanceNightVision(): brightness and sharpness back to
 * the midpoint of their supported range, contrast and saturation back to 1.
 * No-ops when the ImageCapture processor is unavailable; capability/apply
 * failures are caught and logged as warnings.
 * NOTE(review): assumes `getPhotoCapabilities()` exposes these ranges and
 * that `setOptions()` exists — non-standard in some browsers; the try/catch
 * covers that case.
 */
async function resetEnhancements() {
    if (!imageProcessor) return;
    try {
        const capabilities = await imageProcessor.getPhotoCapabilities();
        const options = {};
        // Guard on min/max — the fields actually used below. (The previous
        // code tested `step !== undefined` but then read min/max, so a range
        // exposing step without min/max produced NaN midpoints.)
        const hasRange = (r) => r && r.min !== undefined && r.max !== undefined;
        if (hasRange(capabilities.brightness)) {
            // Midpoint of the supported range as a neutral default.
            options.brightness = (capabilities.brightness.min + capabilities.brightness.max) / 2;
        }
        if (capabilities.contrast) {
            options.contrast = 1; // Typical default contrast
        }
        if (capabilities.saturation) {
            options.saturation = 1; // Default saturation is usually 1
        }
        if (hasRange(capabilities.sharpness)) {
            options.sharpness = (capabilities.sharpness.min + capabilities.sharpness.max) / 2; // Midpoint default
        }
        // Exposure / white-balance modes were never changed, so nothing to reset:
        // options.exposureMode = 'continuous';
        // options.whiteBalanceMode = 'continuous';
        if (Object.keys(options).length > 0) {
            console.log('Resetting image enhancements:', options);
            await imageProcessor.setOptions(options);
        } else {
            console.log('No specific enhancements to reset or not supported.');
        }
        console.log('Image enhancements reset attempt finished.');
    } catch (err) {
        console.warn('Could not reset image enhancements:', err);
    }
}
// --- Glitch Drawing Function ---
/**
 * Overlay transient "scan-line" glitch artefacts on the canvas.
 * ~8% of frames draw 1–4 thin translucent blue lines; within a glitch frame
 * there is a further 1% chance of a larger dark-blue block in the top 80%.
 * @param {CanvasRenderingContext2D} ctx - target drawing context
 * @param {number} width - drawing width in px
 * @param {number} height - drawing height in px
 */
function drawGlitches(ctx, width, height) {
    // Most frames: no glitch at all.
    if (Math.random() >= 0.08) return;
    const lineCount = Math.floor(Math.random() * 4) + 1; // 1..4 scan lines
    ctx.fillStyle = 'rgba(0, 120, 212, 0.15)'; // Faint blue, slightly more visible
    for (let line = 0; line < lineCount; line++) {
        const lineY = Math.random() * height;
        const lineH = Math.random() * 3 + 1; // 1-4 px tall
        ctx.fillRect(0, lineY, width, lineH);
    }
    // Rare full-width block glitch (1% of glitch frames).
    if (Math.random() < 0.01) {
        const blockY = Math.random() * height * 0.8; // stay in top 80%
        const blockH = Math.random() * height * 0.1 + 10; // 10px + up to 10% of height
        ctx.fillStyle = 'rgba(0, 70, 150, 0.3)'; // Darker blue block
        ctx.fillRect(0, blockY, width, blockH);
    }
}
// --- Sound Control ---
/**
 * Toggle sound effects on/off from the #soundToggle button.
 * On the first enable, also "unlocks" browser audio by playing the detection
 * sound near-silently inside this click handler (autoplay policies only let
 * audio start from a user gesture), then rewinding and restoring volume.
 * Globals read/written: isSoundEnabled, soundInitialized, detectionSoundElement.
 */
function toggleSound() {
isSoundEnabled = !isSoundEnabled;
const button = document.getElementById('soundToggle');
button.textContent = `Sound FX: ${isSoundEnabled ? 'ON' : 'OFF'}`;
console.log(`Sound FX toggled: ${isSoundEnabled ? 'ACTIVE' : 'MUTED'}`);
// Without the <audio> element there is nothing to play; disable the button.
if (!detectionSoundElement) {
console.error("Detection sound element not found!");
button.textContent = 'Sound FX: ERROR';
button.disabled = true;
return;
}
// --- Robust Audio Context Unlocking ---
// Needs to happen in direct response to user interaction (this click)
if (isSoundEnabled && !soundInitialized) {
console.log("Attempting to initialize audio context...");
// Ensure the element isn't muted from previous state
detectionSoundElement.muted = false;
detectionSoundElement.volume = 0.01; // Play very quietly for unlock
const playPromise = detectionSoundElement.play();
if (playPromise !== undefined) {
playPromise.then(_ => {
// Playback started successfully (even if silent)
detectionSoundElement.pause(); // Stop the unlock sound immediately
detectionSoundElement.currentTime = 0;
detectionSoundElement.volume = 1.0; // Reset volume for actual use
soundInitialized = true;
console.log("Audio context initialized successfully.");
}).catch(error => {
// Typically NotAllowedError (autoplay policy) or NotSupportedError.
console.error(`Audio context initialization failed: ${error.name} - ${error.message}`);
console.warn("Sound FX may not work due to browser restrictions or errors.");
// Surface the failure on the button and revert the enabled state.
button.textContent = 'Sound FX: ERROR';
isSoundEnabled = false; // Revert state as it failed
// Keep button enabled to allow another try?
// button.disabled = true;
});
} else {
// Some browsers might not return a promise if it can't play
console.warn("Audio play() did not return a promise. Context might not be initialized.");
// We can assume it might have worked or might fail later.
// Let's try setting soundInitialized tentatively, actual sounds might still fail.
soundInitialized = true;
detectionSoundElement.volume = 1.0; // Reset volume
}
}
// --- End Audio Context Unlocking ---
// If turning sound OFF, pause any currently playing sound
if (!isSoundEnabled) {
detectionSoundElement.pause();
detectionSoundElement.currentTime = 0;
}
}
// Added: Function to handle key presses
/**
 * Keyboard shortcut handler: "h"/"H" toggles visibility of the controls bar
 * and the detection-info panel — unless focus is on a form control
 * (SELECT/INPUT/BUTTON), where typing "h" should behave normally.
 * @param {KeyboardEvent} event
 */
function handleKeyPress(event) {
    if (event.key !== 'h' && event.key !== 'H') return;
    const focusedTag = document.activeElement.tagName;
    if (['SELECT', 'INPUT', 'BUTTON'].includes(focusedTag)) return;
    controlsElement.classList.toggle('hidden');
    detectionInfoElement.classList.toggle('hidden'); // keep both panels in sync
    console.log('UI visibility toggled with key press.');
}
/**
 * Button handler that toggles the on-screen UI: both the controls bar and
 * the detection-info panel flip their `hidden` class together.
 */
function toggleHideUI() {
    for (const panel of [controlsElement, detectionInfoElement]) {
        panel.classList.toggle('hidden');
    }
    console.log('UI visibility toggled with button press.');
}
</script>
</body>
</html>