/*
 * detect-worker.js — SharedWorker that runs the ALVINN TensorFlow.js
 * detection model off the main thread.
 *
 * Reconstructed from git format-patch c7622c36 ("Add basic worker function",
 * Justin Georgi, 2024-07-18).  The companion hunk in src/pages/detect.vue
 * creates the worker in mounted() with
 *     new SharedWorker('../js/detect-worker.js', { type: 'module' })
 * and posts { call: 'loadModel', weights: this.modelLocation, preload: true },
 * clearing `modelLoading` when the reply arrives.
 *
 * NOTE(review): only the 'loadModel' message is wired up so far.  The
 * functions below localDetect() were pasted from the Vue component and still
 * reference `this.*`, f7 dialogs and DOM refs, none of which exist inside a
 * worker — see the FIXME on each.
 */
import * as tf from '@tensorflow/tfjs'
// FIXME(review): framework7-vue is a UI library; f7.dialog cannot render from
// a worker thread (and importing it here may fail outright at module load).
// Errors should be reported back to the page through port.postMessage instead.
import { f7 } from 'framework7-vue'

// Currently loaded GraphModel, shared by every page connected to this worker.
let model = null

/**
 * SharedWorker entry point — fires once per connecting page.
 * Message protocol (e.data.call):
 *   'loadModel'  { weights, preload } -> replies { success: true } on success
 *                                        or { error: true, message } on failure
 *   (anything else) -> debug echo reply { result1, result2 }
 */
self.onconnect = (e) => {
  const port = e.ports[0]

  port.onmessage = function (e) {
    switch (e.data.call) {
      case 'loadModel':
        // Paths arrive root-relative from the page; prefix '.' so the fetch
        // resolves relative to the worker script's location.
        loadModel('.' + e.data.weights, e.data.preload).then(() => {
          port.postMessage({ success: true })
        }).catch((err) => {
          port.postMessage({ error: true, message: err.message })
        })
        break
      default:
        console.log('Worker message incoming:')
        console.log(e)
        port.postMessage({ result1: 'First result', result2: 'Second result' })
        break
    }
  }

  port.start()
}

/**
 * Load the graph model at `weights`, reusing the cached one when the same
 * URL is requested again.
 * @param {string} weights  URL of the model.json to load
 * @param {boolean} preload run one dummy prediction to warm up the weights
 * @returns {Promise<tf.GraphModel>} the loaded (and cached) model
 */
async function loadModel (weights, preload) {
  // Fix: tf.GraphModel exposes no `modelURL` property, so the original
  // `model.modelURL == weights` check was always false and the model was
  // reloaded on every call.  We record the URL ourselves after loading.
  if (model && model.modelURL === weights) {
    return model
  } else if (model) {
    // Fix: tf.dispose() frees tensors, not models — GraphModel has its own
    // dispose() that releases the weight tensors.
    model.dispose()
  }
  model = await tf.loadGraphModel(weights)
  model.modelURL = weights
  const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
  /*****************
   * If preloading then run model
   * once on fake data to preload
   * weights for a faster response
   *****************/
  if (preload) {
    const dummyT = tf.ones([1, modelWidth, modelHeight, 3])
    // Fix: dispose both the warm-up prediction and the dummy input so the
    // preload pass does not leak GPU/heap memory.
    tf.dispose(model.predict(dummyT))
    tf.dispose(dummyT)
  }
  return model
}

/**
 * Run one detection pass over `imageData` with the loaded model.
 * Pre-processes to grayscale (replicated to 3 channels, scaled to [0,1]),
 * runs the model, then converts the raw output rows
 * [cx, cy, w, h, score_class0, score_class1, ...] into per-class
 * NMS-filtered boxes with normalised (0-1) corner coordinates.
 * @param {ImageData|HTMLCanvasElement} imageData  pixel source for tf.browser.fromPixels
 * @returns {Promise<{detections: Array<{top,left,bottom,right,label,confidence}>}>}
 */
async function localDetect (imageData) {
  console.time('pre-process')
  const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
  let gTense = null
  const input = tf.tidy(() => {
    // Resize -> grayscale -> [0,1]; the model expects 3 channels, so the
    // single gray channel is stacked three times.
    gTense = tf.image.rgbToGrayscale(
      tf.image.resizeBilinear(tf.browser.fromPixels(imageData), [modelWidth, modelHeight])
    ).div(255.0).expandDims(0)
    return tf.concat([gTense, gTense, gTense], 3)
  })
  tf.dispose(gTense) // tidy() already released this intermediate; safe no-op
  console.timeEnd('pre-process')

  console.time('run prediction')
  const res = model.predict(input)
  const tRes = tf.transpose(res, [0, 2, 1])
  const rawRes = tRes.arraySync()[0]
  console.timeEnd('run prediction')

  console.time('post-process')
  const outputSize = res.shape[1] // 4 box coords + one score per class
  const rawBoxes = []
  const rawScores = []

  // Drop rows where every class score is below the 0.05 floor, and convert
  // the surviving centre/size boxes to normalised [x1, y1, x2, y2].
  for (let i = 0; i < rawRes.length; i++) {
    const getScores = rawRes[i].slice(4)
    if (getScores.every(s => s < .05)) { continue }
    const getBox = rawRes[i].slice(0, 4)
    rawBoxes.push([
      (getBox[0] - (getBox[2] / 2)) / modelWidth,
      (getBox[1] - (getBox[3] / 2)) / modelHeight,
      (getBox[0] + (getBox[2] / 2)) / modelWidth,
      (getBox[1] + (getBox[3] / 2)) / modelHeight,
    ])
    rawScores.push(getScores)
  }

  let output = null
  if (rawBoxes.length > 0) {
    const tBoxes = tf.tensor2d(rawBoxes)
    const boxes_data = []
    const scores_data = []
    const classes_data = []
    // Per-class non-max suppression: at most 10 boxes, IoU 0.5, score 0.05.
    for (let c = 0; c < outputSize - 4; c++) {
      const structureScores = rawScores.map(x => x[c])
      const tScores = tf.tensor1d(structureScores)
      const resBoxes = await tf.image.nonMaxSuppressionAsync(tBoxes, tScores, 10, 0.5, .05)
      const validBoxes = resBoxes.dataSync()
      tf.dispose(resBoxes)
      // Fix: the per-class score tensor was created every iteration but the
      // original disposed it only once after the loop, leaking one tensor
      // per class; release it each pass.
      tf.dispose(tScores)
      // Fix: dataSync() returns a TypedArray, which is always truthy — test
      // its length instead.
      if (validBoxes.length > 0) {
        boxes_data.push(...rawBoxes.filter((_, idx) => validBoxes.includes(idx)))
        const outputScores = structureScores.filter((_, idx) => validBoxes.includes(idx))
        scores_data.push(...outputScores)
        // fill() mutates outputScores, but its values were already copied
        // into scores_data by the push above.
        classes_data.push(...outputScores.fill(c))
      }
    }

    tf.dispose(tBoxes)
    output = { detections: [] }
    for (let i = 0; i < classes_data.length; i++) {
      const [dLeft, dTop, dRight, dBottom] = boxes_data[i]
      output.detections.push({
        "top": dTop,
        "left": dLeft,
        "bottom": dBottom,
        "right": dRight,
        // Fix: the original read `this.detectorLabels`, but `this` is
        // undefined in a module-scope worker function and threw as soon as
        // anything was detected.  Fall back to the numeric class index until
        // the page supplies a label list.
        // FIXME(review): pass detectorLabels to the worker in a message.
        "label": self.detectorLabels?.[classes_data[i]]?.name ?? String(classes_data[i]),
        "confidence": scores_data[i] * 100
      })
    }
  }
  // Fix: tRes was only disposed inside the branch above and leaked whenever
  // no box cleared the score floor; dispose it unconditionally.
  tf.dispose(tRes)
  tf.dispose(res)
  tf.dispose(input)
  console.timeEnd('post-process')

  return output ?? { detections: [] }
}

/**
 * Fetch the label list for the configured detector from a remote ALVINN
 * server and store it on the component as [{name, detect: true}, ...].
 * FIXME(review): pasted from the Vue component — inside the worker `this`
 * is undefined (no serverSettings/detectorName/remoteTimeout) and f7.dialog
 * cannot render; this cannot run here until the worker message protocol
 * carries the settings and error replies.
 */
function getRemoteLabels () {
  var self = this
  var modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detectors`
  var xhr = new XMLHttpRequest()
  xhr.open("GET", modelURL)
  xhr.setRequestHeader('Content-Type', 'application/json')
  xhr.timeout = 10000
  xhr.ontimeout = this.remoteTimeout
  xhr.onload = function () {
    if (this.status !== 200) {
      console.log(xhr.response)
      const errorResponse = JSON.parse(xhr.response)
      f7.dialog.alert(`ALVINN has encountered an error: ${errorResponse.error}`)
      return
    }
    // Labels of the matching detector, blanks removed, sorted, all enabled.
    var detectors = JSON.parse(xhr.response).detectors
    var findLabel = detectors
      .find( d => { return d.name == self.detectorName } )?.labels
      .filter( l => { return l != "" } ).sort()
      .map( l => { return {'name': l, 'detect': true} } )
    self.detectorLabels = findLabel || []
  }
  xhr.onerror = function (e) {
    f7.dialog.alert('ALVINN has encountered an unknown server error')
    return
  }

  xhr.send()
}

/**
 * POST the current image to a remote ALVINN /detect endpoint and store the
 * parsed result on the component.
 * FIXME(review): pasted from the Vue component — relies on `this.detecting`,
 * `this.imageView`, `this.resultData` and f7 dialogs, none of which exist in
 * a worker.
 */
function remoteDetect () {
  var self = this
  var modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detect`
  var xhr = new XMLHttpRequest()
  xhr.open("POST", modelURL)
  xhr.timeout = 10000
  xhr.ontimeout = this.remoteTimeout
  xhr.setRequestHeader('Content-Type', 'application/json')
  xhr.onload = function () {
    self.detecting = false
    if (this.status !== 200) {
      console.log(xhr.response)
      const errorResponse = JSON.parse(xhr.response)
      f7.dialog.alert(`ALVINN has encountered an error: ${errorResponse.error}`)
      return;
    }
    self.resultData = JSON.parse(xhr.response)
    self.uploadDirty = true
  }

  var doodsData = {
    "detector_name": this.detectorName,
    "detect": {
      "*": 1
    },
    // Strip the "data:image/...;base64," prefix — DOODS wants bare base64.
    "data": this.imageView.src.split(',')[1]
  }

  xhr.send(JSON.stringify(doodsData))
}

/**
 * XHR timeout handler shared by the remote calls above.
 * FIXME(review): same worker-context caveats as getRemoteLabels().
 */
function remoteTimeout () {
  this.detecting = false
  f7.dialog.alert('No connection to remote ALVINN instance. Please check app settings.')
}

/**
 * Continuously detect on video frames and draw a target marker on the
 * overlay canvas for every point scoring above 0.5, with opacity equal to
 * the confidence.  Loops (one tf.nextFrame per pass) while videoAvailable.
 * FIXME(review): pasted from the Vue component — uses this.$refs canvases
 * and DOM images, which a worker cannot touch; frame results should instead
 * be posted back to the page for drawing.
 */
async function videoFrameDetect (vidData) {
  await this.loadModel(this.miniLocation)
  const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
  const imCanvas = this.$refs.image_cvs
  const imageCtx = imCanvas.getContext("2d")
  const target = this.$refs.target_image
  await tf.nextFrame();
  imCanvas.width = imCanvas.clientWidth
  imCanvas.height = imCanvas.clientHeight
  imageCtx.clearRect(0, 0, imCanvas.width, imCanvas.height)
  // Letterbox the video into the canvas, preserving aspect ratio.
  var imgWidth
  var imgHeight
  const imgAspect = vidData.width / vidData.height
  const rendAspect = imCanvas.width / imCanvas.height
  if (imgAspect >= rendAspect) {
    imgWidth = imCanvas.width
    imgHeight = imCanvas.width / imgAspect
  } else {
    imgWidth = imCanvas.height * imgAspect
    imgHeight = imCanvas.height
  }
  while (this.videoAvailable) {
    console.time('frame-process')
    try {
      const input = tf.tidy(() => {
        return tf.image.resizeBilinear(tf.browser.fromPixels(vidData), [modelWidth, modelHeight]).div(255.0).expandDims(0)
      })
      const res = model.predict(input)
      const rawRes = tf.transpose(res, [0, 2, 1]).arraySync()[0]

      let rawCoords = []
      if (rawRes) {
        // Collect [cx, cy, bestScore] for every row with any score > 0.5.
        for (var i = 0; i < rawRes.length; i++) {
          let getScores = rawRes[i].slice(4)
          if (getScores.some( s => s > .5)) {
            let foundTarget = rawRes[i].slice(0, 2)
            foundTarget.push(Math.max(...getScores))
            rawCoords.push(foundTarget)
          }
        }

        imageCtx.clearRect(0, 0, imCanvas.width, imCanvas.height)
        for (var coord of rawCoords) {
          console.log(`x: ${coord[0]}, y: ${coord[1]}`)
          // Map model-space coords into the letterboxed canvas region;
          // -5 centres the 20x20 marker hotspot (NOTE(review): -10 would
          // centre it exactly — confirm intended offset).
          let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / modelWidth) * imgWidth - 5
          let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / modelHeight) * imgHeight - 5
          imageCtx.globalAlpha = coord[2]
          imageCtx.drawImage(target, pointX, pointY, 20, 20)
        }
      }
      tf.dispose(input)
      tf.dispose(res)
      tf.dispose(rawRes) // rawRes is a plain array — harmless no-op
    } catch (e) {
      console.log(e)
    }
    console.timeEnd('frame-process')
    await tf.nextFrame();
  }
}