Optimize reactive vue data variables

Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-05 16:30:33 -07:00
parent 9e90823858
commit a98577e206
4 changed files with 82 additions and 80 deletions
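The change moves values the template never renders out of the component's reactive `data()` and into module-scope `let` declarations, so Vue no longer wraps them with reactivity; the few values the template still needs (such as the region title) are exposed through computed properties instead. A minimal sketch of that pattern, using a hypothetical, simplified component (the worker path is an assumption for illustration):

```js
// Module-scope variables: held outside data(), so Vue skips making them reactive.
const regions = ['Thorax', 'Abdomen/Pelvis', 'Limbs', 'Head and Neck']
let activeRegion = 0      // written once in created(), read via the computed below
let detectWorker = null   // Web Worker handle; never rendered, no reactivity needed

export default {
  data () {
    return {
      // only values the template actually binds to stay reactive
      modelLoading: true
    }
  },
  computed: {
    regionTitle () {
      // reads the non-reactive module variable; evaluated after created() has set it
      return regions[activeRegion]
    }
  },
  created () {
    activeRegion = 2  // plain assignment, no this.activeRegion, no reactivity overhead
    // hypothetical worker path for this sketch
    detectWorker = new Worker(new URL('./detect-worker.js', import.meta.url))
  }
}
```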

View File

@@ -3,11 +3,11 @@ import { f7 } from 'framework7-vue'
export default {
methods: {
async openCamera(imContain) {
var cameraLoaded = false
let cameraLoaded = false
const devicesList = await navigator.mediaDevices.enumerateDevices()
this.videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
if (this.videoDeviceAvailable) {
var vidConstraint = {
let videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
if (videoDeviceAvailable) {
let vidConstraint = {
video: {
width: {
ideal: imContain.offsetWidth
@@ -41,20 +41,20 @@ export default {
tempCtx.drawImage(vidViewer, 0, 0)
this.getImage(tempCVS.toDataURL())
},
async videoFrameDetectWorker (vidData) {
async videoFrameDetectWorker (vidData, vidWorker) {
const startDetection = () => {
createImageBitmap(vidData).then(imVideoFrame => {
this.vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
})
}
vidData.addEventListener('resize',startDetection,{once: true})
this.vidWorker.onmessage = (eVid) => {
vidWorker.onmessage = (eVid) => {
if (eVid.data.error) {
console.log(eVid.data.message)
f7.dialog.alert(`ALVINN AI model error: ${eVid.data.message}`)
} else if (this.videoAvailable) {
createImageBitmap(vidData).then(imVideoFrame => {
this.vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
})
if (eVid.data.coords) {
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)

View File

@@ -2,7 +2,7 @@
<f7-page name="detect" :id="detectorName + '-detect-page'" @wheel="(e = $event) => e.preventDefault()" @touchmove="(e = $event) => e.preventDefault()">
<!-- Top Navbar -->
<f7-navbar :sliding="false" :back-link="true" back-link-url="/" back-link-force>
<f7-nav-title sliding>{{ regions[activeRegion] }}</f7-nav-title>
<f7-nav-title sliding>{{ regionTitle }}</f7-nav-title>
<f7-nav-right>
<f7-link v-if="!isCordova" :icon-only="true" tooltip="Fullscreen" :icon-f7="isFullscreen ? 'viewfinder_circle_fill' : 'viewfinder'" @click="toggleFullscreen"></f7-link>
<f7-link :icon-only="true" tooltip="ALVINN help" icon-f7="question_circle_fill" href="/help/"></f7-link>
@@ -30,7 +30,8 @@
:style="`display: ${(imageLoaded || videoAvailable) ? 'block' : 'none'}; flex: 1 1 0%; max-width: 100%; max-height: 100%; min-width: 0; min-height: 0; background-size: contain; background-position: center; background-repeat: no-repeat; z-index: 2;`"
></canvas>
<f7-link v-if="getInfoUrl && (selectedChip > -1)"
:style="`left: ${infoLinkPos.x}px; top: ${infoLinkPos.y}px; transform: translate(-50%,-50%); background: hsla(${showResults[selectedChip].confidence / 100 * 120}deg, 100%, 50%, .5)`" class="structure-info"
:style="`left: ${infoLinkPos.x}px; top: ${infoLinkPos.y}px; transform: translate(-50%,-50%); background: hsla(${showResults[selectedChip].confidence / 100 * 120}deg, 100%, 50%, .5)`"
class="structure-info"
:icon-only="true"
icon-f7="info"
target="_blank"
@@ -144,6 +145,20 @@
import detectionWorker from '@/assets/detect-worker.js?worker&inline'
const regions = ['Thorax','Abdomen/Pelvis','Limbs','Head and Neck']
let activeRegion = 4
let classesList = []
let imCvsLocation = {}
let imageLoadMode = "environment"
let serverSettings = {}
let otherSettings = {}
let modelLocation = ''
let miniLocation = ''
let reloadModel = false
let detectWorker = null
let vidWorker = null
let canvasMoving = false
export default {
mixins: [submitMixin, detectionMixin, cameraMixin, touchMixin],
props: {
@@ -155,38 +170,24 @@
},
data () {
return {
regions: ['Thorax','Abdomen/Pelvis','Limbs','Head and Neck'],
resultData: {},
selectedChip: -1,
activeRegion: 4,
classesList: [],
imageLoaded: false,
imageView: new Image(),
imCvsLocation: {},
imageLoadMode: "environment",
detecting: false,
detectPanel: false,
showDetectSettings: false,
detectorName: '',
detectorLevel: 50,
detectorLabels: [],
serverSettings: {},
otherSettings: {},
isCordova: !!window.cordova,
secureProtocol: location.protocol == 'https:',
uploadUid: null,
uploadDirty: false,
modelLocation: '',
miniLocation: '',
modelLoading: true,
reloadModel: false,
videoDeviceAvailable: false,
videoAvailable: false,
cameraStream: null,
infoLinkPos: {},
detectWorker: null,
vidWorker: null,
canvasMoving: false,
canvasOffset: {x: 0, y: 0},
canvasZoom: 1,
structureZoomed: false,
@@ -198,44 +199,44 @@
},
created () {
let loadOtherSettings = localStorage.getItem('otherSettings')
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
if (loadOtherSettings) otherSettings = JSON.parse(loadOtherSettings)
this.detectorName = this.f7route.params.region
switch (this.detectorName) {
case 'thorax':
this.activeRegion = 0
activeRegion = 0
break;
case 'abdomen':
this.activeRegion = 1
activeRegion = 1
break;
case 'limbs':
this.activeRegion = 2
activeRegion = 2
break;
case 'head':
this.activeRegion = 3
activeRegion = 3
break;
}
let modelJ = `../models/${this.detectorName}${this.otherSettings.mini ? '-mini' : ''}/model.json`
let modelJ = `../models/${this.detectorName}${otherSettings.mini ? '-mini' : ''}/model.json`
let miniJ = `../models/${this.detectorName}-mini/model.json`
this.modelLocation = new URL(modelJ,import.meta.url).href
this.miniLocation = new URL(miniJ,import.meta.url).href
modelLocation = new URL(modelJ,import.meta.url).href
miniLocation = new URL(miniJ,import.meta.url).href
let classesJ = `../models/${this.detectorName}/classes.json`
fetch(new URL(classesJ,import.meta.url).href)
.then((mod) => { return mod.json() })
.then((classes) => {
this.classesList = classes
this.detectorLabels = this.classesList.map( l => { return {'name': l, 'detect': true} } )
classesList = classes
this.detectorLabels = classesList.map( l => { return {'name': l, 'detect': true} } )
})
var loadServerSettings = localStorage.getItem('serverSettings')
if (loadServerSettings) this.serverSettings = JSON.parse(loadServerSettings)
if (loadServerSettings) serverSettings = JSON.parse(loadServerSettings)
},
mounted () {
if (this.serverSettings && this.serverSettings.use) {
if (serverSettings && serverSettings.use) {
this.getRemoteLabels()
this.modelLoading = false
} else {
this.modelLoading = true
if (!this.useWorkers) {
this.loadModel(this.modelLocation, true).then(() => {
this.loadModel(modelLocation, true).then(() => {
this.modelLoading = false
}).catch((e) => {
console.log(e.message)
@@ -243,8 +244,8 @@
this.modelLoading = false
})
} else {
this.detectWorker = new detectionWorker()
this.detectWorker.onmessage = (eMount) => {
detectWorker = new detectionWorker()
detectWorker.onmessage = (eMount) => {
self = this
if (eMount.data.error) {
console.log(eMount.data.message)
@@ -252,21 +253,24 @@
}
self.modelLoading = false
}
this.vidWorker = new detectionWorker()
this.vidWorker.onmessage = (eMount) => {
vidWorker = new detectionWorker()
vidWorker.onmessage = (eMount) => {
self = this
if (eMount.data.error) {
console.log(eMount.data.message)
f7.dialog.alert(`ALVINN AI nano model error: ${eMount.data.message}`)
}
}
this.detectWorker.postMessage({call: 'loadModel', weights: this.modelLocation, preload: true})
this.vidWorker.postMessage({call: 'loadModel', weights: this.miniLocation, preload: true})
detectWorker.postMessage({call: 'loadModel', weights: modelLocation, preload: true})
vidWorker.postMessage({call: 'loadModel', weights: miniLocation, preload: true})
}
}
window.onresize = (e) => { if (this.$refs.image_cvs) this.selectChip('redraw') }
},
computed: {
regionTitle () {
return regions[activeRegion]
},
message () {
if (this.modelLoading) {
return "Preparing ALVINN..."
@@ -310,7 +314,7 @@
}
},
demoEnabled () {
return this.otherSettings.demo || this.demoMode
return otherSettings.demo || this.demoMode
},
infoLinkTarget () {
if (!this.getInfoUrl) return ''
@@ -324,12 +328,12 @@
return `--chip-media-gradient: conic-gradient(from ${270 - (confFactor * 360 / 2)}deg, hsl(${confFactor * 120}deg, 100%, 50%) ${confFactor}turn, hsl(${confFactor * 120}deg, 50%, 66%) ${confFactor}turn)`
},
async setData () {
if (this.detectWorker) {
this.detectWorker.onmessage = (eDetect) => {
if (detectWorker) {
detectWorker.onmessage = (eDetect) => {
self = this
if (eDetect.data.error) {
self.detecting = false
self.resultData = {}
self.this.resultData = {}
loadFailure()
f7.dialog.alert(`ALVINN structure finding error: ${eDetect.data.message}`)
} else if (eDetect.data.success == 'detection') {
@@ -340,7 +344,7 @@
}
self.uploadDirty = true
} else if (eDetect.data.success == 'model') {
self.reloadModel = false
reloadModel = false
loadSuccess()
}
f7.utils.nextFrame(() => {
@@ -352,26 +356,26 @@
let loadSuccess = null
let loadFailure = null
let modelReloading = null
if (!this.useWorkers && this.reloadModel) {
await this.loadModel(this.modelLocation)
this.reloadModel = false
if (!this.useWorkers && reloadModel) {
await this.loadModel(modelLocation)
reloadModel = false
} else {
modelReloading = new Promise((res, rej) => {
loadSuccess = res
loadFailure = rej
if (this.reloadModel) {
this.detectWorker.postMessage({call: 'loadModel', weights: this.modelLocation})
if (reloadModel) {
detectWorker.postMessage({call: 'loadModel', weights: modelLocation})
} else {
loadSuccess()
}
})
}
if (this.serverSettings && this.serverSettings.use) {
if (serverSettings && serverSettings.use) {
this.remoteDetect()
} else if (this.useWorkers) {
Promise.all([modelReloading,createImageBitmap(this.imageView)]).then(res => {
this.detectWorker.postMessage({call: 'localDetect', image: res[1]}, [res[1]])
detectWorker.postMessage({call: 'localDetect', image: res[1]}, [res[1]])
})
} else {
createImageBitmap(this.imageView).then(res => {
@@ -399,12 +403,12 @@
}
},
async selectImage (mode) {
this.imageLoadMode = mode
imageLoadMode = mode
if (this.isCordova && mode == "camera") {
navigator.camera.getPicture(this.getImage, this.onFail, { quality: 50, destinationType: Camera.DestinationType.DATA_URL, correctOrientation: true });
return
}
if (mode == "camera" && !this.otherSettings.disableVideo) {
if (mode == "camera" && !otherSettings.disableVideo) {
this.videoAvailable = await this.openCamera(this.$refs.image_container)
if (this.videoAvailable) {
this.selectedChip = -1
@@ -417,9 +421,9 @@
vidElement.width = trackDetails.width
vidElement.height = trackDetails.height
if (!this.useWorkers) {
this.videoFrameDetect(vidElement)
this.videoFrameDetect(vidElement, miniLocation)
} else {
this.videoFrameDetectWorker(vidElement)
this.videoFrameDetectWorker(vidElement, vidWorker)
}
return
}
@@ -445,7 +449,6 @@
return clip[0].getType("image/png");
}).then(blob => {
let clipImage = URL.createObjectURL(blob);
console.log(clipImage)
this.getImage(clipImage)
}).catch(e => {
console.log(e)
@@ -530,12 +533,12 @@
imageCtx.lineWidth = 3 / this.canvasZoom
if (this.imageLoaded) {
let imageLoc = this.box2cvs({top: 0,left: 0,right: 1,bottom: 1})
this.imCvsLocation.top = imageLoc[0].cvsTop
this.imCvsLocation.left = imageLoc[0].cvsLeft
this.imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
this.imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
imCvsLocation.top = imageLoc[0].cvsTop
imCvsLocation.left = imageLoc[0].cvsLeft
imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
if (drawChip) {imageCtx.globalAlpha = .5}
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, this.imCvsLocation.left, this.imCvsLocation.top, this.imCvsLocation.width, this.imCvsLocation.height)
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, imCvsLocation.left, imCvsLocation.top, imCvsLocation.width, imCvsLocation.height)
if (drawChip) {imageCtx.globalAlpha = 1}
}
this.structureZoomed = false
@@ -546,13 +549,13 @@
if (this.videoAvailable) {
this.closeCamera()
this.detecting = true
this.reloadModel = true
reloadModel = true
resolve(searchImage)
} else if (this.isCordova && this.imageLoadMode == "camera") {
} else if (this.isCordova && imageLoadMode == "camera") {
this.detecting = true
resolve('data:image/jpg;base64,' + searchImage)
}
if (this.imageLoadMode == 'clipboard') {
if (imageLoadMode == 'clipboard') {
this.detecting = true
resolve(searchImage)
}
@@ -561,7 +564,7 @@
this.detecting = true
resolve(reader.result)
},{once: true})
if (this.imageLoadMode == 'sample') {
if (imageLoadMode == 'sample') {
fetch(`${this.isCordova ? 'https://localhost' : '.'}/samples/${this.detectorName}-${searchImage}.jpeg`).then( resp => {
return resp.blob()
}).then(respBlob => {
@@ -590,11 +593,11 @@
imCanvas.height = imCanvas.clientHeight
const imageCtx = imCanvas.getContext("2d")
let imageLoc = this.box2cvs({top: 0,left: 0,right: 1,bottom: 1})
this.imCvsLocation.top = imageLoc[0].cvsTop
this.imCvsLocation.left = imageLoc[0].cvsLeft
this.imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
this.imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, this.imCvsLocation.left, this.imCvsLocation.top, this.imCvsLocation.width, this.imCvsLocation.height)
imCvsLocation.top = imageLoc[0].cvsTop
imCvsLocation.left = imageLoc[0].cvsLeft
imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, imCvsLocation.left, imCvsLocation.top, imCvsLocation.width, imCvsLocation.height)
f7.utils.nextFrame(() => {
this.setData()
})
@@ -672,13 +675,13 @@
})
},
startMove() {
this.canvasMoving = true
canvasMoving = true
},
endMove() {
this.canvasMoving = false
canvasMoving = false
},
makeMove(event) {
if (this.canvasMoving) {
if (canvasMoving) {
this.canvasOffset.x += event.movementX
this.canvasOffset.y += event.movementY
this.selectChip("redraw")

View File

@@ -172,8 +172,8 @@ export default {
this.detecting = false
f7.dialog.alert('No connection to remote ALVINN instance. Please check app settings.')
},
async videoFrameDetect (vidData) {
await this.loadModel(this.miniLocation)
async videoFrameDetect (vidData, miniModel) {
await this.loadModel(miniModel)
const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
const imCanvas = this.$refs.image_cvs
const imageCtx = imCanvas.getContext("2d")

View File

@@ -25,7 +25,6 @@ export default {
moveTouch(event) {
switch (event.touches.length) {
case 1:
console.log(event)
this.canvasOffset.x += event.touches[0].clientX - this.touchPrevious.x
this.canvasOffset.y += event.touches[0].clientY - this.touchPrevious.y
this.touchPrevious = {x: event.touches[0].clientX, y: event.touches[0].clientY}