Add real-time detection to camera stream #143
src/assets/target.svg (new file)
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+<svg version="1.1" viewBox="0 0 10 10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+  <defs>
+    <filter id="filter1847" x="-.075" y="-.075" width="1.15" height="1.15" color-interpolation-filters="sRGB">
+      <feGaussianBlur stdDeviation="0.3125"/>
+    </filter>
+    <radialGradient id="radialGradient1903" cx="5" cy="5" r="5.75" gradientUnits="userSpaceOnUse">
+      <stop stop-color="#ff0" offset="0"/>
+      <stop stop-color="#ff0" stop-opacity="0" offset="1"/>
+    </radialGradient>
+  </defs>
+  <path d="m0 5 3.833274-1.166726 1.166726-3.833274 1.166726 3.833274 3.833274 1.166726-3.833274 1.166726-1.166726 3.833274-1.166726-3.833274z" color="#000000" fill="url(#radialGradient1903)" fill-rule="evenodd" filter="url(#filter1847)" opacity=".63"/>
+</svg>
@@ -1,3 +1,5 @@
+import * as tf from '@tensorflow/tfjs'
+
 export default {
   methods: {
     async openCamera(imContain) {
@@ -5,7 +7,6 @@ export default {
       const devicesList = await navigator.mediaDevices.enumerateDevices()
       this.videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
       if (this.videoDeviceAvailable) {
-        navigator.mediaDevices.getUserMedia({video: true})
         var vidConstraint = {
           video: {
             width: {
@@ -39,7 +40,22 @@ export default {
       this.getImage(tempCVS.toDataURL())
     },
+    async videoStream () {
+      //TODO
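+      // Streams the camera through tf.js instead of grabbing a single still:
+      // check for a video input device, wrap the <video> element in a webcam
+      // iterator, then hand frames to the detection loop.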
+      const vidElement = this.$refs.vid_viewer
+      this.videoAvailable = false
+      const devicesList = await navigator.mediaDevices.enumerateDevices()
+      this.videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
+      if (this.videoDeviceAvailable) {
+        var vidConstraint = {
+          //resizeWidth: 640,
+          //resizeHeight: 640,
+          facingMode: 'environment'
+        }
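+        // tf.data.webcam() opens the camera with these constraints
+        // ('environment' prefers the rear-facing camera) and resolves to an
+        // iterator whose capture() yields each frame as a tensor.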
+        tf.data.webcam(vidElement, vidConstraint).then( webData => {
+          this.videoAvailable = true
+          this.cameraStream = vidElement.captureStream()
+          this.videoFrameDetect(webData)
+        })
+      }
+    }
   }
 }
@@ -6,12 +6,12 @@
   </f7-navbar>
   <f7-block class="detect-grid">
     <div class="image-container" ref="image_container">
-      <canvas id="im-draw" ref="image_cvs" @click="structureClick" :style="`display: ${imageLoaded ? 'block' : 'none'}; flex: 1 1 0%; max-width: 100%; max-height: 100%; min-width: 0; min-height: 0; background-size: contain; background-position: center; background-repeat: no-repeat`" />
       <SvgIcon v-if="!imageView && !videoAvailable" :icon="f7route.params.region" fill-color="var(--avn-theme-color)" @click="selectImage" />
-      <div class="vid-container" v-if="videoAvailable" style="width: 100%; height: 100%">
+      <div class="vid-container" :style="`display: ${videoAvailable ? 'block' : 'none'}; position: absolute; width: 100%; height: 100%;`">
         <video id="vid-view" ref="vid_viewer" :srcObject="cameraStream" :autoPlay="true" style="width: 100%; height: 100%"></video>
-        <f7-button @click="captureVidFrame()" style="position: absolute; bottom: 32px; left: 50%; transform: translateX(-50%);" fill large>Capture</f7-button>
+        <f7-button @click="captureVidFrame()" style="position: absolute; bottom: 32px; left: 50%; transform: translateX(-50%); z-index: 3;" fill large>Capture</f7-button>
       </div>
+      <canvas id="im-draw" ref="image_cvs" @click="structureClick" :style="`display: ${(imageLoaded || videoAvailable) ? 'block' : 'none'}; flex: 1 1 0%; max-width: 100%; max-height: 100%; min-width: 0; min-height: 0; background-size: contain; background-position: center; background-repeat: no-repeat; z-index: 2;`" />
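+      <!-- Layering: the canvas (z-index 2) now overlays the video so detection
+           markers draw on top of the stream, with the capture button above both. -->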
     </div>
     <div class="chip-results" style="grid-area: result-view; flex: 0 0 auto; align-self: center;">
       <f7-chip v-for="result in showResults.filter( r => { return r.aboveThreshold && r.isSearched && !r.isDeleted })"
@@ -56,6 +56,7 @@
       </f7-button>
     </f7-segmented>
     <input type="file" ref="image_chooser" @change="getImage()" accept="image/*" style="display: none;"/>
+    <img src="../assets/target.svg" ref="target_image" style="display: none;" />
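+    <!-- Hidden marker sprite: videoFrameDetect() reads this <img> through the
+         target_image ref and stamps it on the canvas at each detection point. -->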
   </f7-block>

   <f7-panel :id="detectorName + '-settings'" right cover :backdrop="false" :container-el="`#${detectorName}-detect-page`">
@@ -163,7 +164,6 @@
         case 'thorax':
           this.activeRegion = 0
           this.detectorName = 'thorax'
-          //this.classesList = thoraxClasses
           /* VITE setting */
           this.modelLocation = `../models/thorax${this.otherSettings.mini ? '-mini' : ''}/model.json`
           /* PWA Build setting */
@@ -172,7 +172,12 @@
           break;
         case 'abdomen':
           this.activeRegion = 1
-          this.detectorName = 'combined'
+          this.detectorName = 'abdomen'
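+          // Abdomen now loads its own model instead of reusing the 'combined'
+          // detector, following the same VITE/PWA path pattern as thorax.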
/* VITE setting */
|
||||
this.modelLocation = `../models/abdomen${this.otherSettings.mini ? '-mini' : ''}/model.json`
|
||||
/* PWA Build setting */
|
||||
//this.modelLocation = `./models/abdomen${this.otherSettings.mini ? '-mini' : ''}/model.json`
|
||||
this.modelLocationCordova = `https://localhost/models/abdomen${this.otherSettings.mini ? '-mini' : ''}/model.json`
|
||||
break;
|
||||
case 'limbs':
|
||||
this.activeRegion = 2
|
||||
@@ -284,7 +289,14 @@
         }
         if (mode == "camera") {
           this.videoAvailable = await this.openCamera(this.$refs.image_container)
-          if (this.videoAvailable) { return }
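+          // Size the <video> element from the track's reported settings so
+          // frames keep the camera's native resolution and aspect ratio.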
+          if (this.videoAvailable) {
+            var trackDetails = this.cameraStream.getVideoTracks()[0].getSettings()
+            var vidElement = this.$refs.vid_viewer
+            vidElement.width = trackDetails.width
+            vidElement.height = trackDetails.height
+            this.videoFrameDetect(vidElement)
+            return
+          }
         }
         if (mode == 'sample') {
           f7.dialog.create({
@@ -421,7 +433,7 @@
       this.selectChip(findBox >= 0 ? this.resultData.detections[findBox].resultIndex : this.selectedChip)
     },
     box2cvs(boxInput) {
-      if (!boxInput) return []
+      if (!boxInput || boxInput.length == 0) return []
       const boxList = boxInput.length ? boxInput : [boxInput]
       const [imCanvas, imageCtx] = this.resetView()
       var imgWidth

@@ -1,5 +1,7 @@
 import * as tf from '@tensorflow/tfjs'
+import { image } from '@tensorflow/tfjs'
 import { f7 } from 'framework7-vue'
+import { nextTick } from 'vue'

 var model = null

@@ -150,7 +152,59 @@ export default {
     remoteTimeout () {
       this.detecting = false
       f7.dialog.alert('No connection to remote ALVINN instance. Please check app settings.')
     },
+    async videoFrameDetect (vidData) {
+      const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
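+      // Assumes an NHWC signature of [1, height, width, 3]: indices 1-2 give
+      // the frame size the model expects (order is moot for a square input).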
+      const imCanvas = this.$refs.image_cvs
+      const imageCtx = imCanvas.getContext("2d")
+      const target = this.$refs.target_image
+      await tf.nextFrame();
+      imCanvas.width = imCanvas.clientWidth
+      imCanvas.height = imCanvas.clientHeight
+      imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
+      var imgWidth
+      var imgHeight
+      const imgAspect = vidData.clientWidth / vidData.clientHeight
+      const rendAspect = imCanvas.width / imCanvas.height
+      if (imgAspect >= rendAspect) {
+        imgWidth = imCanvas.width
+        imgHeight = imCanvas.width / imgAspect
+      } else {
+        imgWidth = imCanvas.height * imgAspect
+        imgHeight = imCanvas.height
+      }
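+      // Contain-fit: work out the letterboxed region the video occupies in
+      // the canvas, so model coordinates can be mapped back onto the display.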
+      while (this.videoAvailable) {
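+        // Per-frame loop: runs until the stream is stopped; tf.nextFrame() at
+        // the end of each pass yields to the browser so the UI stays live.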
+        console.time('frame-process')
+        try {
+          const input = tf.tidy(() => {
+            return tf.image.resizeBilinear(tf.browser.fromPixels(vidData), [modelWidth, modelHeight]).div(255.0).expandDims(0)
+          })
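+          // fromPixels -> resizeBilinear -> /255 -> expandDims gives a
+          // [1, H, W, 3] float input; tf.tidy() disposes the intermediates.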
+          const res = model.predict(input)
+          const rawRes = tf.transpose(res,[0,2,1]).arraySync()[0]
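+          // The output looks like a YOLOv8-style head, [1, 4 + numClasses, numBoxes];
+          // transposing makes each row one candidate: [x, y, w, h, class scores...].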
+
+          let rawCoords = []
+          if (rawRes) {
+            for (var i = 0; i < rawRes.length; i++) {
+              var getScores = rawRes[i].slice(4)
+              if (getScores.some( s => s > .5)) {
+                rawCoords.push(rawRes[i].slice(0,2))
+              }
+            }
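+            // Keep just the centre point (x, y) of any candidate whose best
+            // class score clears 0.5; no NMS is applied, since only point
+            // markers get drawn.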
+
+            imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
+            for (var coord of rawCoords) {
+              console.log(`x: ${coord[0]}, y: ${coord[1]}`)
+              let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / modelWidth) * imgWidth -5
+              let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / modelHeight) * imgHeight -5
+              imageCtx.drawImage(target, pointX, pointY, 20, 20)
+            }
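+            // Map model-space coords into the letterboxed region and stamp the
+            // target sprite over the live video (drawn 20x20; the -5 offset
+            // nudges it onto the point).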
+          }
+        } catch (e) {
+          console.log(e)
+        }
+        console.timeEnd('frame-process')
+        await tf.nextFrame();
+      }
+    }
   }
 }