-
-
+
@@ -147,7 +148,9 @@
uploadUid: null,
uploadDirty: false,
modelLocation: '',
+ miniLocation: '',
modelLoading: true,
+ reloadModel: false,
videoDeviceAvailable: false,
videoAvailable: false,
cameraStream: null
@@ -163,17 +166,23 @@
case 'thorax':
this.activeRegion = 0
this.detectorName = 'thorax'
- //this.classesList = thoraxClasses
/* VITE setting */
this.modelLocation = `../models/thorax${this.otherSettings.mini ? '-mini' : ''}/model.json`
+ this.miniLocation = `../models/thorax-mini/model.json`
/* PWA Build setting */
//this.modelLocation = `./models/thorax${this.otherSettings.mini ? '-mini' : ''}/model.json`
this.modelLocationCordova = `https://localhost/models/thorax${this.otherSettings.mini ? '-mini' : ''}/model.json`
break;
case 'abdomen':
this.activeRegion = 1
- this.detectorName = 'combined'
- break;
+ this.detectorName = 'abdomen'
+ /* VITE setting */
+ this.modelLocation = `../models/abdomen${this.otherSettings.mini ? '-mini' : ''}/model.json`
+ this.miniLocation = `../models/abdomen-mini/model.json`
+ /* PWA Build setting */
+ //this.modelLocation = `./models/abdomen${this.otherSettings.mini ? '-mini' : ''}/model.json`
+ this.modelLocationCordova = `https://localhost/models/abdomen${this.otherSettings.mini ? '-mini' : ''}/model.json`
+ break;
case 'limbs':
this.activeRegion = 2
this.detectorName = 'defaultNew'
@@ -195,7 +204,7 @@
this.modelLoading = false
} else {
this.modelLoading = true
- this.loadModel(this.isCordova ? this.modelLocationCordova : this.modelLocation).then(() => {
+ this.loadModel(this.isCordova ? this.modelLocationCordova : this.modelLocation, true).then(() => {
this.modelLoading = false
}).catch((e) => {
console.log(e.message)
@@ -253,7 +262,11 @@
chipGradient (confVal) {
return `--chip-media-background: hsl(${confVal / 100 * 120}deg 100% 50%)`
},
- setData () {
+ async setData () {
+ if (this.reloadModel) {
+ await this.loadModel(this.isCordova ? this.modelLocationCordova : this.modelLocation)
+ this.reloadModel = false
+ }
if (this.serverSettings && this.serverSettings.use) {
this.remoteDetect()
} else {
@@ -284,7 +297,20 @@
}
if (mode == "camera") {
this.videoAvailable = await this.openCamera(this.$refs.image_container)
- if (this.videoAvailable) { return }
+ if (this.videoAvailable) {
+ this.imageLoaded = false
+ this.imageView = null
+ this.$refs.image_cvs.style['background-image'] = 'none'
+ this.resultData = {}
+ var trackDetails = this.cameraStream.getVideoTracks()[0].getSettings()
+ var vidElement = this.$refs.vid_viewer
+ vidElement.width = trackDetails.width
+ vidElement.height = trackDetails.height
+ if (!this.otherSettings.disableVideo) {
+ this.videoFrameDetect(vidElement)
+ }
+ return
+ }
}
if (mode == 'sample') {
f7.dialog.create({
@@ -350,6 +376,7 @@
if (this.videoAvailable) {
this.closeCamera()
this.detecting = true
+ this.reloadModel = true
resolve(searchImage)
} else if (this.isCordova && this.imageLoadMode == "camera") {
this.detecting = true
@@ -421,7 +448,7 @@
this.selectChip(findBox >= 0 ? this.resultData.detections[findBox].resultIndex : this.selectedChip)
},
box2cvs(boxInput) {
- if (!boxInput) return []
+ if (!boxInput || boxInput.length === 0) return []
const boxList = boxInput.length ? boxInput : [boxInput]
const [imCanvas, imageCtx] = this.resetView()
var imgWidth
diff --git a/src/pages/detection-mixin.js b/src/pages/detection-mixin.js
index 46eb871..2dbd615 100644
--- a/src/pages/detection-mixin.js
+++ b/src/pages/detection-mixin.js
@@ -5,11 +5,23 @@ var model = null
export default {
methods: {
- async loadModel(weights) {
+ async loadModel(weights, preload) {
+ if (model && model.modelUrl === weights) {
+ return model
+ } else if (model) {
+ model.dispose()
+ }
model = await tf.loadGraphModel(weights)
const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
- const dummyT = tf.ones([1,modelWidth,modelHeight,3])
- model.predict(dummyT) //Run model once to preload weights for better response time
+ /*****************
+ * If preloading then run model
+ * once on fake data to preload
+ * weights for a faster response
+ *****************/
+ if (preload) {
+ const dummyT = tf.ones([1,modelWidth,modelHeight,3])
+ tf.dispose(model.predict(dummyT)); dummyT.dispose()
+ }
return model
},
async localDetect(imageData) {
@@ -150,7 +162,60 @@ export default {
remoteTimeout () {
this.detecting = false
f7.dialog.alert('No connection to remote ALVINN instance. Please check app settings.')
- }
+ },
+ async videoFrameDetect (vidData) {
+ await this.loadModel(this.miniLocation)
+ const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
+ const imCanvas = this.$refs.image_cvs
+ const imageCtx = imCanvas.getContext("2d")
+ const target = this.$refs.target_image
+ await tf.nextFrame();
+ imCanvas.width = imCanvas.clientWidth
+ imCanvas.height = imCanvas.clientHeight
+ imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
+ var imgWidth
+ var imgHeight
+ const imgAspect = vidData.clientWidth / vidData.clientHeight
+ const rendAspect = imCanvas.width / imCanvas.height
+ if (imgAspect >= rendAspect) {
+ imgWidth = imCanvas.width
+ imgHeight = imCanvas.width / imgAspect
+ } else {
+ imgWidth = imCanvas.height * imgAspect
+ imgHeight = imCanvas.height
+ }
+ while (this.videoAvailable) {
+ console.time('frame-process')
+ try {
+ const input = tf.tidy(() => {
+ return tf.image.resizeBilinear(tf.browser.fromPixels(vidData), [modelWidth, modelHeight]).div(255.0).expandDims(0)
+ })
+ const res = model.predict(input)
+ const rawRes = tf.tidy(() => tf.transpose(res,[0,2,1]).arraySync()[0]); tf.dispose([input, res])
+ let rawCoords = []
+ if (rawRes) {
+ for (var i = 0; i < rawRes.length; i++) {
+ var getScores = rawRes[i].slice(4)
+ if (getScores.some( s => s > .5)) {
+ rawCoords.push(rawRes[i].slice(0,2))
+ }
+ }
+
+ imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
+ for (var coord of rawCoords) {
+ console.log(`x: ${coord[0]}, y: ${coord[1]}`)
+ let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / modelWidth) * imgWidth -5
+ let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / modelHeight) * imgHeight -5
+ imageCtx.drawImage(target, pointX, pointY, 20, 20)
+ }
+ }
+ } catch (e) {
+ console.log(e)
+ }
+ console.timeEnd('frame-process')
+ await tf.nextFrame();
+ }
+ }
}
}
\ No newline at end of file
diff --git a/src/pages/settings.vue b/src/pages/settings.vue
index 5cec503..c249f97 100644
--- a/src/pages/settings.vue
+++ b/src/pages/settings.vue
@@ -27,6 +27,10 @@
Enable demo mode
+
+ Disable video estimates
+
+
Use external server
diff --git a/src/pages/specs.vue b/src/pages/specs.vue
index 8277a26..42fdc7f 100644
--- a/src/pages/specs.vue
+++ b/src/pages/specs.vue
@@ -13,7 +13,8 @@
-
+
+
@@ -39,7 +40,8 @@
return {
thoraxDetails: {},
miniThoraxDetails: {},
- abdomenDetails: { "version": "N/A" },
+ abdomenDetails: {},
+ miniAbdomenDetails: {},
limbsDetails: { "version": "N/A" },
headneckDetails: { "version": "N/A" },
alvinnVersion: store().getVersion,
@@ -60,7 +62,14 @@
.then((mod) => {
this.miniThoraxDetails = mod.default
})
-
+ import('../models/abdomen/descript.json')
+ .then((mod) => {
+ this.abdomenDetails = mod.default
+ })
+ import('../models/abdomen-mini/descript.json')
+ .then((mod) => {
+ this.miniAbdomenDetails = mod.default
+ })
},
methods: {
}