9 Commits

Author SHA1 Message Date
71b7b16bdd Add router debugging keys
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-16 09:16:19 -07:00
1936d90550 Fix structure click selection error
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-12 20:22:41 -07:00
8bf74e51ea Cleanup additional tensor in detection worker
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-12 15:25:53 -07:00
21e46713a7 Fix infolink error on chip auto deselection
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-12 15:03:32 -07:00
726d56131c Clean up variable declarations
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-11 20:20:00 -07:00
874901086d Add Structure Class (#203)
This PR adds a class for detected structures and two additional classes that handle the coordinates of their bounding boxes. The new classes handle conversions between image pixels, canvas pixels, and screen pixels more cleanly, so they replace several chunks of the previous code, such as the box2cvs function.

Reviewed-on: #203
2024-10-12 02:28:22 +00:00
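
Below is a minimal usage sketch of the Structure lifecycle added by this PR, assuming a detection object in normalized (0-1) image coordinates as produced by the detection worker; the label string and numeric values are illustrative, and the import path matches the one used in detect.vue.

import { Structure } from '../js/structures'

// Hypothetical detection result (normalized coordinates, confidence in percent)
const detection = {
  label: 'example-structure',
  confidence: 87.5,
  top: 0.20, left: 0.10, bottom: 0.60, right: 0.40
}

const s = new Structure(detection)
s.resultIndex = 0        // position within the detection list
s.setThreshold(50)       // passThreshold becomes confidence >= level
s.isSearched = true      // label is among the currently selected detector labels

console.log(s.aboveThreshold)  // true (87.5 >= 50)
console.log(s.isDeleted)       // false until the user dismisses the chip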
a98577e206 Optimize reactive vue data variables
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-05 16:30:33 -07:00
9e90823858 Fix meta tag warning
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-04 18:52:00 -07:00
269e62b5fb Fix scrolling on non-detect pages
Signed-off-by: Justin Georgi <justin.georgi@gmail.com>
2024-10-04 18:51:29 -07:00
12 changed files with 351 additions and 227 deletions

View File

@@ -75,14 +75,18 @@ async function localDetect(imageData) {
console.time('sw: post-process')
const outputSize = res.shape[1]
const output = {
detections: []
}
let rawBoxes = []
let rawScores = []
let getScores, getBox, boxCalc
for (var i = 0; i < rawRes.length; i++) {
var getScores = rawRes[i].slice(4)
for (let i = 0; i < rawRes.length; i++) {
getScores = rawRes[i].slice(4)
if (getScores.every( s => s < .05)) { continue }
var getBox = rawRes[i].slice(0,4)
var boxCalc = [
getBox = rawRes[i].slice(0,4)
boxCalc = [
(getBox[0] - (getBox[2] / 2)) / modelWidth,
(getBox[1] - (getBox[3] / 2)) / modelHeight,
(getBox[0] + (getBox[2] / 2)) / modelWidth,
@@ -101,7 +105,7 @@ async function localDetect(imageData) {
let boxes_data = []
let scores_data = []
let classes_data = []
for (var c = 0; c < outputSize - 4; c++) {
for (let c = 0; c < outputSize - 4; c++) {
structureScores = rawScores.map(x => x[c])
tScores = tf.tensor1d(structureScores)
resBoxes = await tf.image.nonMaxSuppressionAsync(tBoxes,tScores,10,0.5,.05)
@@ -109,7 +113,7 @@ async function localDetect(imageData) {
tf.dispose(resBoxes)
if (validBoxes) {
boxes_data.push(...rawBoxes.filter( (_, idx) => validBoxes.includes(idx)))
var outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
let outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
scores_data.push(...outputScores)
classes_data.push(...outputScores.fill(c))
}
@@ -119,18 +123,15 @@ async function localDetect(imageData) {
tf.dispose(tBoxes)
tf.dispose(tScores)
tf.dispose(tRes)
tf.dispose(resBoxes)
const valid_detections_data = classes_data.length
var output = {
detections: []
}
for (var i =0; i < valid_detections_data; i++) {
var [dLeft, dTop, dRight, dBottom] = boxes_data[i]
for (let i =0; i < valid_detections_data; i++) {
let [dLeft, dTop, dRight, dBottom] = boxes_data[i]
output.detections.push({
"top": dTop,
"left": dLeft,
"bottom": dBottom,
"right": dRight,
// "label": this.detectorLabels[classes_data[i]].name,
"label": classes_data[i],
"confidence": scores_data[i] * 100
})
@@ -155,7 +156,7 @@ async function videoFrame (vidData) {
const rawRes = tf.transpose(res,[0,2,1]).arraySync()[0]
if (rawRes) {
for (var i = 0; i < rawRes.length; i++) {
for (let i = 0; i < rawRes.length; i++) {
let getScores = rawRes[i].slice(4)
if (getScores.some( s => s > .5)) {
let foundTarget = rawRes[i].slice(0,2)

View File

@@ -74,12 +74,14 @@
}
},
async created () {
document.addEventListener('wheel', event => {
event.preventDefault()
}, {passive: false})
document.addEventListener('touchmove', event => {
event.preventDefault()
}, {passive: false})
document.addEventListener('keydown', e => {
if (e.code == 'KeyR') {
console.log(f7.views.main.router.history)
}
if (e.code == 'KeyB') {
f7.views.main.router.back()
}
})
if (!window.cordova) {
const confText = await fetch('./conf/conf.yaml')
.then((mod) => { return mod.text() })

View File

@@ -18,7 +18,7 @@
<meta name="msapplication-tap-highlight" content="no">
<title>ALVINN</title>
<% if (TARGET === 'web') { %>
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent">
<link rel="apple-touch-icon" href="icons/apple-touch-icon.png">
<link rel="icon" href="icons/favicon.png">

src/js/structures.js (new file, 157 lines)
View File

@@ -0,0 +1,157 @@
class Coordinate {
constructor(x, y) {
this.x = x
this.y = y
}
toRefFrame(...frameArgs) {
if (frameArgs.length == 0) {
return {x: this.x, y: this.y}
}
let outFrames = []
//Get Coordinates in Image Reference Frame
if (frameArgs[0].tagName == 'IMG' && frameArgs[0].width && frameArgs[0].height) {
outFrames.push({
x: this.x * frameArgs[0].width,
y: this.y * frameArgs[0].height
})
} else {
throw new Error('Coordinate: invalid reference frame for frameType: Image')
}
//Get Coordinates in Canvas Reference Frame
if (frameArgs[1]) {
if (frameArgs[1].tagName == 'CANVAS' && frameArgs[1].width && frameArgs[1].height) {
let imgWidth
let imgHeight
const imgAspect = frameArgs[0].width / frameArgs[0].height
const rendAspect = frameArgs[1].width / frameArgs[1].height
if (imgAspect >= rendAspect) {
imgWidth = frameArgs[1].width
imgHeight = frameArgs[1].width / imgAspect
} else {
imgWidth = frameArgs[1].height * imgAspect
imgHeight = frameArgs[1].height
}
outFrames.push({
x: (frameArgs[1].width - imgWidth) / 2 + this.x * imgWidth,
y: (frameArgs[1].height - imgHeight) / 2 + this.y * imgHeight
})
} else {
throw new Error('Coordinate: invalid reference frame for frameType: Canvas')
}
}
//Get Coordinates in Screen Reference Frame
if (frameArgs[2]) {
if (frameArgs[2].zoom && frameArgs[2].offset && frameArgs[2].offset.x !== undefined && frameArgs[2].offset.y !== undefined) {
outFrames.push({
x: outFrames[1].x * frameArgs[2].zoom + frameArgs[2].offset.x,
y: outFrames[1].y * frameArgs[2].zoom + frameArgs[2].offset.y
})
} else {
throw new Error('Coordinate: invalid reference frame for frameType: Screen')
}
}
return outFrames
}
toString() {
return `(x: ${this.x}, y: ${this.y})`
}
}
export class StructureBox {
constructor(top, left, bottom, right) {
this.topLeft = new Coordinate(left, top)
this.bottomRight = new Coordinate(right, bottom)
}
getBoxes(boxType, ...frameArgs) {
let lowerH, lowerV, calcSide
switch (boxType) {
case 'point':
lowerH = 'right'
lowerV = 'bottom'
break
case 'side':
lowerH = 'width'
lowerV = 'height'
calcSide = true
break
default:
throw new Error(`StructureBox: invalid boxType - ${boxType}`)
}
if (frameArgs.length == 0) {
return {
left: this.topLeft.x,
top: this.topLeft.y,
[lowerH]: this.bottomRight.x - ((calcSide) ? this.topLeft.x : 0),
[lowerV]: this.bottomRight.y - ((calcSide) ? this.topLeft.y : 0)
}
}
const tL = this.topLeft.toRefFrame(...frameArgs)
const bR = this.bottomRight.toRefFrame(...frameArgs)
let outBoxes = []
tL.forEach((cd, i) => {
outBoxes.push({
left: cd.x,
top: cd.y,
[lowerH]: bR[i].x - ((calcSide) ? cd.x : 0),
[lowerV]: bR[i].y - ((calcSide) ? cd.y : 0)
})
})
return outBoxes
}
}
export class Structure {
constructor(structResult) {
this.label = structResult.label
this.confidence = structResult.confidence
this.box = new StructureBox(
structResult.top,
structResult.left,
structResult.bottom,
structResult.right
)
this.deleted = false
this.index = -1
this.passThreshold = true
this.searched = false
}
get resultIndex() {
return this.index
}
set resultIndex(newIdx) {
this.index = newIdx
}
get isDeleted() {
return this.deleted
}
set isDeleted(del) {
this.deleted = !!del
}
get isSearched() {
return this.searched
}
set isSearched(ser) {
this.searched = !!ser
}
get aboveThreshold() {
return this.passThreshold
}
setThreshold(level) {
if (typeof level != 'number') {
throw new Error(`Structure: invalid threshold level ${level}`)
}
this.passThreshold = this.confidence >= level
}
}
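
A minimal sketch of the reference-frame conversion these classes provide (the role previously played by box2cvs), assuming a browser environment; the element sizes, zoom, and offset values are illustrative.

import { StructureBox } from '../js/structures'

const img = new Image(640, 480)               // IMG element: image reference frame
const cvs = document.createElement('canvas')  // CANVAS element: canvas reference frame
cvs.width = 800
cvs.height = 600

// StructureBox takes normalized top, left, bottom, right
const box = new StructureBox(0.20, 0.10, 0.60, 0.40)

// 'side' entries are {left, top, width, height}; 'point' gives {left, top, right, bottom}.
// One entry is returned per reference frame: image pixels, canvas pixels, screen pixels.
const [imageBox, canvasBox, screenBox] =
  box.getBoxes('side', img, cvs, { zoom: 1.5, offset: { x: 20, y: 10 } })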

View File

@@ -3,11 +3,11 @@ import { f7 } from 'framework7-vue'
export default {
methods: {
async openCamera(imContain) {
var cameraLoaded = false
let cameraLoaded = false
const devicesList = await navigator.mediaDevices.enumerateDevices()
this.videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
if (this.videoDeviceAvailable) {
var vidConstraint = {
let videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
if (videoDeviceAvailable) {
let vidConstraint = {
video: {
width: {
ideal: imContain.offsetWidth
@@ -41,24 +41,24 @@ export default {
tempCtx.drawImage(vidViewer, 0, 0)
this.getImage(tempCVS.toDataURL())
},
async videoFrameDetectWorker (vidData) {
async videoFrameDetectWorker (vidData, vidWorker) {
const startDetection = () => {
createImageBitmap(vidData).then(imVideoFrame => {
this.vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
})
}
vidData.addEventListener('resize',startDetection,{once: true})
this.vidWorker.onmessage = (eVid) => {
vidWorker.onmessage = (eVid) => {
if (eVid.data.error) {
console.log(eVid.data.message)
f7.dialog.alert(`ALVINN AI model error: ${eVid.data.message}`)
} else if (this.videoAvailable) {
createImageBitmap(vidData).then(imVideoFrame => {
this.vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
})
if (eVid.data.coords) {
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
for (var coord of eVid.data.coords) {
for (let coord of eVid.data.coords) {
let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / eVid.data.modelWidth) * imgWidth - 10
let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / eVid.data.modelHeight) * imgHeight - 10
console.debug(`cx: ${pointX}, cy: ${pointY}`)
@@ -72,8 +72,7 @@ export default {
const imCanvas = this.$refs.image_cvs
const imageCtx = imCanvas.getContext("2d")
const target = this.$refs.target_image
var imgWidth
var imgHeight
let imgWidth, imgHeight
f7.utils.nextFrame(() => {
imCanvas.width = imCanvas.clientWidth
imCanvas.height = imCanvas.clientHeight

View File

@@ -56,7 +56,7 @@
},
computed: {
commentText () {
var text = f7.textEditor.get('.comment-editor').getValue()
let text = f7.textEditor.get('.comment-editor').getValue()
if (this.userEmail) {
text += `\\n\\nSubmitted by: ${this.userEmail}`
}
@@ -65,9 +65,9 @@
},
methods: {
sendFeedback () {
var self = this
var issueURL = `https://gitea.azgeorgis.net/api/v1/repos/Georgi_Lab/ALVINN_f7/issues?access_token=9af8ae15b1ee5a98afcb3083bb488e4cf3c683af`
var xhr = new XMLHttpRequest()
let self = this
const issueURL = `https://gitea.azgeorgis.net/api/v1/repos/Georgi_Lab/ALVINN_f7/issues?access_token=9af8ae15b1ee5a98afcb3083bb488e4cf3c683af`
let xhr = new XMLHttpRequest()
xhr.open("POST", issueURL)
xhr.setRequestHeader('Content-Type', 'application/json')
xhr.setRequestHeader('accept', 'application/json')

View File

@@ -1,8 +1,8 @@
<template>
<f7-page name="detect" :id="detectorName + '-detect-page'">
<f7-page name="detect" :id="detectorName + '-detect-page'" @wheel="(e = $event) => e.preventDefault()" @touchmove="(e = $event) => e.preventDefault()">
<!-- Top Navbar -->
<f7-navbar :sliding="false" :back-link="true" back-link-url="/" back-link-force>
<f7-nav-title sliding>{{ regions[activeRegion] }}</f7-nav-title>
<f7-nav-title sliding>{{ regionTitle }}</f7-nav-title>
<f7-nav-right>
<f7-link v-if="!isCordova" :icon-only="true" tooltip="Fullscreen" :icon-f7="isFullscreen ? 'viewfinder_circle_fill' : 'viewfinder'" @click="toggleFullscreen"></f7-link>
<f7-link :icon-only="true" tooltip="ALVINN help" icon-f7="question_circle_fill" href="/help/"></f7-link>
@@ -29,8 +29,9 @@
@click="structureClick"
:style="`display: ${(imageLoaded || videoAvailable) ? 'block' : 'none'}; flex: 1 1 0%; max-width: 100%; max-height: 100%; min-width: 0; min-height: 0; background-size: contain; background-position: center; background-repeat: no-repeat; z-index: 2;`"
></canvas>
<f7-link v-if="getInfoUrl && (selectedChip > -1)"
:style="`left: ${infoLinkPos.x}px; top: ${infoLinkPos.y}px; transform: translate(-50%,-50%); background: hsla(${showResults[selectedChip].confidence / 100 * 120}deg, 100%, 50%, .5)`" class="structure-info"
<f7-link v-if="getInfoUrl && (selectedChip > -1) && showResults[selectedChip]"
:style="`left: ${infoLinkPos.x}px; top: ${infoLinkPos.y}px; transform: translate(-50%,-50%); background: hsla(${showResults[selectedChip].confidence / 100 * 120}deg, 100%, 50%, .5)`"
class="structure-info"
:icon-only="true"
icon-f7="info"
target="_blank"
@@ -143,7 +144,22 @@
import touchMixin from './touch-mixin'
import detectionWorker from '@/assets/detect-worker.js?worker&inline'
import { Structure, StructureBox } from '../js/structures'
const regions = ['Thorax','Abdomen/Pelvis','Limbs','Head and Neck']
let activeRegion = 4
let classesList = []
let imageLoadMode = "environment"
let serverSettings = {}
let otherSettings = {}
let modelLocation = ''
let miniLocation = ''
let reloadModel = false
let detectWorker = null
let vidWorker = null
let canvasMoving = false
let imageLocation = new StructureBox(0, 0, 1, 1)
export default {
mixins: [submitMixin, detectionMixin, cameraMixin, touchMixin],
props: {
@@ -155,38 +171,24 @@
},
data () {
return {
regions: ['Thorax','Abdomen/Pelvis','Limbs','Head and Neck'],
resultData: {},
selectedChip: -1,
activeRegion: 4,
classesList: [],
imageLoaded: false,
imageView: new Image(),
imCvsLocation: {},
imageLoadMode: "environment",
detecting: false,
detectPanel: false,
showDetectSettings: false,
detectorName: '',
detectorLevel: 50,
detectorLabels: [],
serverSettings: {},
otherSettings: {},
isCordova: !!window.cordova,
secureProtocol: location.protocol == 'https:',
uploadUid: null,
uploadDirty: false,
modelLocation: '',
miniLocation: '',
modelLoading: true,
reloadModel: false,
videoDeviceAvailable: false,
videoAvailable: false,
cameraStream: null,
infoLinkPos: {},
detectWorker: null,
vidWorker: null,
canvasMoving: false,
canvasOffset: {x: 0, y: 0},
canvasZoom: 1,
structureZoomed: false,
@@ -198,44 +200,44 @@
},
created () {
let loadOtherSettings = localStorage.getItem('otherSettings')
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
if (loadOtherSettings) otherSettings = JSON.parse(loadOtherSettings)
this.detectorName = this.f7route.params.region
switch (this.detectorName) {
case 'thorax':
this.activeRegion = 0
activeRegion = 0
break;
case 'abdomen':
this.activeRegion = 1
activeRegion = 1
break;
case 'limbs':
this.activeRegion = 2
activeRegion = 2
break;
case 'head':
this.activeRegion = 3
activeRegion = 3
break;
}
let modelJ = `../models/${this.detectorName}${this.otherSettings.mini ? '-mini' : ''}/model.json`
let modelJ = `../models/${this.detectorName}${otherSettings.mini ? '-mini' : ''}/model.json`
let miniJ = `../models/${this.detectorName}-mini/model.json`
this.modelLocation = new URL(modelJ,import.meta.url).href
this.miniLocation = new URL(miniJ,import.meta.url).href
modelLocation = new URL(modelJ,import.meta.url).href
miniLocation = new URL(miniJ,import.meta.url).href
let classesJ = `../models/${this.detectorName}/classes.json`
fetch(new URL(classesJ,import.meta.url).href)
.then((mod) => { return mod.json() })
.then((classes) => {
this.classesList = classes
this.detectorLabels = this.classesList.map( l => { return {'name': l, 'detect': true} } )
classesList = classes
this.detectorLabels = classesList.map( l => { return {'name': l, 'detect': true} } )
})
var loadServerSettings = localStorage.getItem('serverSettings')
if (loadServerSettings) this.serverSettings = JSON.parse(loadServerSettings)
const loadServerSettings = localStorage.getItem('serverSettings')
if (loadServerSettings) serverSettings = JSON.parse(loadServerSettings)
},
mounted () {
if (this.serverSettings && this.serverSettings.use) {
if (serverSettings && serverSettings.use) {
this.getRemoteLabels()
this.modelLoading = false
} else {
this.modelLoading = true
if (!this.useWorkers) {
this.loadModel(this.modelLocation, true).then(() => {
this.loadModel(modelLocation, true).then(() => {
this.modelLoading = false
}).catch((e) => {
console.log(e.message)
@@ -243,8 +245,8 @@
this.modelLoading = false
})
} else {
this.detectWorker = new detectionWorker()
this.detectWorker.onmessage = (eMount) => {
detectWorker = new detectionWorker()
detectWorker.onmessage = (eMount) => {
self = this
if (eMount.data.error) {
console.log(eMount.data.message)
@@ -252,21 +254,24 @@
}
self.modelLoading = false
}
this.vidWorker = new detectionWorker()
this.vidWorker.onmessage = (eMount) => {
vidWorker = new detectionWorker()
vidWorker.onmessage = (eMount) => {
self = this
if (eMount.data.error) {
console.log(eMount.data.message)
f7.dialog.alert(`ALVINN AI nano model error: ${eMount.data.message}`)
}
}
this.detectWorker.postMessage({call: 'loadModel', weights: this.modelLocation, preload: true})
this.vidWorker.postMessage({call: 'loadModel', weights: this.miniLocation, preload: true})
detectWorker.postMessage({call: 'loadModel', weights: modelLocation, preload: true})
vidWorker.postMessage({call: 'loadModel', weights: miniLocation, preload: true})
}
}
window.onresize = (e) => { if (this.$refs.image_cvs) this.selectChip('redraw') }
},
computed: {
regionTitle () {
return regions[activeRegion]
},
message () {
if (this.modelLoading) {
return "Preparing ALVINN..."
@@ -279,17 +284,17 @@
}
},
showResults () {
var filteredResults = this.resultData.detections
let filteredResults = this.resultData.detections
if (!filteredResults) return []
var allSelect = this.detectorLabels.every( s => { return s.detect } )
var selectedLabels = this.detectorLabels
const allSelect = this.detectorLabels.every( s => { return s.detect } )
const selectedLabels = this.detectorLabels
.filter( l => { return l.detect })
.map( l => { return l.name })
filteredResults.forEach( (d, i) => {
filteredResults[i].resultIndex = i
filteredResults[i].aboveThreshold = d.confidence >= this.detectorLevel
filteredResults[i].isSearched = allSelect || selectedLabels.includes(d.label)
d.resultIndex = i
d.setThreshold(this.detectorLevel)
d.isSearched = allSelect || selectedLabels.includes(d.label)
})
if (!filteredResults.some( s => s.resultIndex == this.selectedChip && s.aboveThreshold && s.isSearched && !s.isDeleted)) {
@@ -310,7 +315,7 @@
}
},
demoEnabled () {
return this.otherSettings.demo || this.demoMode
return otherSettings.demo || this.demoMode
},
infoLinkTarget () {
if (!this.getInfoUrl) return ''
@@ -324,8 +329,8 @@
return `--chip-media-gradient: conic-gradient(from ${270 - (confFactor * 360 / 2)}deg, hsl(${confFactor * 120}deg, 100%, 50%) ${confFactor}turn, hsl(${confFactor * 120}deg, 50%, 66%) ${confFactor}turn)`
},
async setData () {
if (this.detectWorker) {
this.detectWorker.onmessage = (eDetect) => {
if (detectWorker) {
detectWorker.onmessage = (eDetect) => {
self = this
if (eDetect.data.error) {
self.detecting = false
@@ -334,13 +339,15 @@
f7.dialog.alert(`ALVINN structure finding error: ${eDetect.data.message}`)
} else if (eDetect.data.success == 'detection') {
self.detecting = false
self.resultData = eDetect.data.detections
if (self.resultData) {
self.resultData.detections.map(d => {d.label = self.detectorLabels[d.label].name})
}
self.resultData = {detections: []}
eDetect.data.detections.detections.forEach((d) => {
d.label = self.detectorLabels[d.label].name
let detectedStructure = new Structure(d)
self.resultData.detections.push(detectedStructure)
})
self.uploadDirty = true
} else if (eDetect.data.success == 'model') {
self.reloadModel = false
reloadModel = false
loadSuccess()
}
f7.utils.nextFrame(() => {
@@ -352,26 +359,26 @@
let loadSuccess = null
let loadFailure = null
let modelReloading = null
if (!this.useWorkers && this.reloadModel) {
await this.loadModel(this.modelLocation)
this.reloadModel = false
if (!this.useWorkers && reloadModel) {
await this.loadModel(modelLocation)
reloadModel = false
} else {
modelReloading = new Promise((res, rej) => {
loadSuccess = res
loadFailure = rej
if (this.reloadModel) {
this.detectWorker.postMessage({call: 'loadModel', weights: this.modelLocation})
if (reloadModel) {
detectWorker.postMessage({call: 'loadModel', weights: modelLocation})
} else {
loadSuccess()
}
})
}
if (this.serverSettings && this.serverSettings.use) {
if (serverSettings && serverSettings.use) {
this.remoteDetect()
} else if (this.useWorkers) {
Promise.all([modelReloading,createImageBitmap(this.imageView)]).then(res => {
this.detectWorker.postMessage({call: 'localDetect', image: res[1]}, [res[1]])
detectWorker.postMessage({call: 'localDetect', image: res[1]}, [res[1]])
})
} else {
createImageBitmap(this.imageView).then(res => {
@@ -399,12 +406,12 @@
}
},
async selectImage (mode) {
this.imageLoadMode = mode
imageLoadMode = mode
if (this.isCordova && mode == "camera") {
navigator.camera.getPicture(this.getImage, this.onFail, { quality: 50, destinationType: Camera.DestinationType.DATA_URL, correctOrientation: true });
return
}
if (mode == "camera" && !this.otherSettings.disableVideo) {
if (mode == "camera" && !otherSettings.disableVideo) {
this.videoAvailable = await this.openCamera(this.$refs.image_container)
if (this.videoAvailable) {
this.selectedChip = -1
@@ -412,14 +419,14 @@
this.imageView.src = null
this.$refs.image_cvs.style['background-image'] = 'none'
this.resultData = {}
var trackDetails = this.cameraStream.getVideoTracks()[0].getSettings()
var vidElement = this.$refs.vid_viewer
const trackDetails = this.cameraStream.getVideoTracks()[0].getSettings()
let vidElement = this.$refs.vid_viewer
vidElement.width = trackDetails.width
vidElement.height = trackDetails.height
if (!this.useWorkers) {
this.videoFrameDetect(vidElement)
this.videoFrameDetect(vidElement, miniLocation)
} else {
this.videoFrameDetectWorker(vidElement)
this.videoFrameDetectWorker(vidElement, vidWorker)
}
return
}
@@ -445,7 +452,6 @@
return clip[0].getType("image/png");
}).then(blob => {
let clipImage = URL.createObjectURL(blob);
console.log(clipImage)
this.getImage(clipImage)
}).catch(e => {
console.log(e)
@@ -473,33 +479,25 @@
iChip = this.selectedChip
}
const [imCanvas, imageCtx] = this.resetView(true)
let structLeft = this.imageView.width * this.resultData.detections[iChip].left
let structTop = this.imageView.height * this.resultData.detections[iChip].top
let structWidth = this.imageView.width * (this.resultData.detections[iChip].right - this.resultData.detections[iChip].left)
let structHeight = this.imageView.height * (this.resultData.detections[iChip].bottom - this.resultData.detections[iChip].top)
let structBox, cvsBox, screenBox
[structBox, cvsBox, screenBox] = this.resultData.detections[iChip].box.getBoxes('side', this.imageView, imCanvas, {zoom: this.canvasZoom, offset: {...this.canvasOffset}})
const boxCoords = this.box2cvs(this.resultData.detections[iChip])[0]
this.infoLinkPos.x = Math.min(Math.max(screenBox.left, 0),imCanvas.width)
this.infoLinkPos.y = Math.min(Math.max(screenBox.top, 0), imCanvas.height)
let boxLeft = boxCoords.cvsLeft
let boxTop = boxCoords.cvsTop
let boxWidth = boxCoords.cvsRight - boxCoords.cvsLeft
let boxHeight = boxCoords.cvsBottom - boxCoords.cvsTop
this.infoLinkPos.x = Math.min(Math.max(boxCoords.cvsLeft * this.canvasZoom + this.canvasOffset.x, 0),imCanvas.width)
this.infoLinkPos.y = Math.min(Math.max(boxCoords.cvsTop * this.canvasZoom + this.canvasOffset.y, 0), imCanvas.height)
let imageScale = Math.max(this.imageView.width / imCanvas.width, this.imageView.height / imCanvas.height)
imageCtx.drawImage(this.imageView, structLeft, structTop, structWidth, structHeight, boxLeft, boxTop, boxWidth, boxHeight)
const imageScale = Math.max(this.imageView.width / imCanvas.width, this.imageView.height / imCanvas.height)
imageCtx.drawImage(this.imageView, structBox.left, structBox.top, structBox.width, structBox.height, cvsBox.left, cvsBox.top, cvsBox.width, cvsBox.height)
imageCtx.save()
imageCtx.arc(boxLeft, boxTop, 14 / this.canvasZoom, 0, 2 * Math.PI)
imageCtx.arc(cvsBox.left, cvsBox.top, 14 / this.canvasZoom, 0, 2 * Math.PI)
imageCtx.closePath()
imageCtx.clip()
imageCtx.drawImage(this.imageView,
structLeft - (14 / this.canvasZoom * imageScale),
structTop - (14 / this.canvasZoom * imageScale),
structBox.left - (14 / this.canvasZoom * imageScale),
structBox.top - (14 / this.canvasZoom * imageScale),
(28 / this.canvasZoom * imageScale),
(28 / this.canvasZoom * imageScale),
boxLeft - (14 / this.canvasZoom),
boxTop - (14 / this.canvasZoom),
cvsBox.left - (14 / this.canvasZoom),
cvsBox.top - (14 / this.canvasZoom),
(28 / this.canvasZoom), (28 / this.canvasZoom))
imageCtx.restore()
this.selectedChip = iChip
@@ -529,13 +527,9 @@
imageCtx.strokeStyle = 'yellow'
imageCtx.lineWidth = 3 / this.canvasZoom
if (this.imageLoaded) {
let imageLoc = this.box2cvs({top: 0,left: 0,right: 1,bottom: 1})
this.imCvsLocation.top = imageLoc[0].cvsTop
this.imCvsLocation.left = imageLoc[0].cvsLeft
this.imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
this.imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
const imageLoc = imageLocation.getBoxes('side', this.imageView, imCanvas)
if (drawChip) {imageCtx.globalAlpha = .5}
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, this.imCvsLocation.left, this.imCvsLocation.top, this.imCvsLocation.width, this.imCvsLocation.height)
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, imageLoc[1].left, imageLoc[1].top, imageLoc[1].width, imageLoc[1].height)
if (drawChip) {imageCtx.globalAlpha = 1}
}
this.structureZoomed = false
@@ -546,13 +540,13 @@
if (this.videoAvailable) {
this.closeCamera()
this.detecting = true
this.reloadModel = true
reloadModel = true
resolve(searchImage)
} else if (this.isCordova && this.imageLoadMode == "camera") {
} else if (this.isCordova && imageLoadMode == "camera") {
this.detecting = true
resolve('data:image/jpg;base64,' + searchImage)
}
if (this.imageLoadMode == 'clipboard') {
if (imageLoadMode == 'clipboard') {
this.detecting = true
resolve(searchImage)
}
@@ -561,7 +555,7 @@
this.detecting = true
resolve(reader.result)
},{once: true})
if (this.imageLoadMode == 'sample') {
if (imageLoadMode == 'sample') {
fetch(`${this.isCordova ? 'https://localhost' : '.'}/samples/${this.detectorName}-${searchImage}.jpeg`).then( resp => {
return resp.blob()
}).then(respBlob => {
@@ -589,12 +583,8 @@
imCanvas.width = imCanvas.clientWidth
imCanvas.height = imCanvas.clientHeight
const imageCtx = imCanvas.getContext("2d")
let imageLoc = this.box2cvs({top: 0,left: 0,right: 1,bottom: 1})
this.imCvsLocation.top = imageLoc[0].cvsTop
this.imCvsLocation.left = imageLoc[0].cvsLeft
this.imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
this.imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, this.imCvsLocation.left, this.imCvsLocation.top, this.imCvsLocation.width, this.imCvsLocation.height)
const imageLoc = imageLocation.getBoxes('side', this.imageView, imCanvas)
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, imageLoc[1].left, imageLoc[1].top, imageLoc[1].width, imageLoc[1].height)
f7.utils.nextFrame(() => {
this.setData()
})
@@ -604,7 +594,7 @@
})
},
async submitData () {
var uploadData = this.showResults
let uploadData = this.showResults
.filter( d => { return d.aboveThreshold && d.isSearched && !d.isDeleted })
.map( r => { return {"top": r.top, "left": r.left, "bottom": r.bottom, "right": r.right, "label": r.label}})
this.uploadUid = await this.uploadData(this.imageView.src.split(',')[1],uploadData,this.uploadUid)
@@ -621,50 +611,28 @@
if (li >= numBoxes) li -= numBoxes
return li
}
let boxCoords = this.box2cvs(this.showResults)
let boxCoords = []
this.resultData.detections.forEach(d => {
let cvsBox = d.box.getBoxes('point',this.imageView,this.$refs.image_cvs)[1]
cvsBox.clickable = d.aboveThreshold && d.isSearched && !d.isDeleted
boxCoords.push(cvsBox)
})
const numBoxes = boxCoords.length
let clickX = (e.offsetX - this.canvasOffset.x) / this.canvasZoom
let clickY = (e.offsetY - this.canvasOffset.y) / this.canvasZoom
let boxEnd = boxCoords.splice(0, this.selectedChip)
boxCoords = boxCoords.concat(boxEnd)
var findBox = boxCoords.findIndex( (r, i) => {
const findBox = boxCoords.findIndex( (r, i) => {
let di = loopIndex(i)
if (di == this.selectedChip ) return false
return r.cvsLeft <= clickX &&
r.cvsRight >= clickX &&
r.cvsTop <= clickY &&
r.cvsBottom >= clickY &&
this.resultData.detections[di].aboveThreshold &&
this.resultData.detections[di].isSearched &&
!this.resultData.detections[di].isDeleted
return r.clickable &&
r.left <= clickX &&
r.right >= clickX &&
r.top <= clickY &&
r.bottom >= clickY
})
this.selectChip(findBox >= 0 ? this.resultData.detections[loopIndex(findBox)].resultIndex : this.selectedChip)
},
box2cvs(boxInput) {
if (!boxInput || boxInput.length == 0) return []
const boxList = boxInput.length ? boxInput : [boxInput]
const imCanvas = this.$refs.image_cvs
var imgWidth
var imgHeight
const imgAspect = this.imageView.width / this.imageView.height
const rendAspect = imCanvas.width / imCanvas.height
if (imgAspect >= rendAspect) {
imgWidth = imCanvas.width
imgHeight = imCanvas.width / imgAspect
} else {
imgWidth = imCanvas.height * imgAspect
imgHeight = imCanvas.height
}
const cvsCoords = boxList.map( (d, i) => {
return {
"cvsLeft": (imCanvas.width - imgWidth) / 2 + d.left * imgWidth,
"cvsRight": (imCanvas.width - imgWidth) / 2 + d.right * imgWidth,
"cvsTop": (imCanvas.height - imgHeight) / 2 + d.top * imgHeight,
"cvsBottom": (imCanvas.height - imgHeight) / 2 + d.bottom * imgHeight
}
})
return cvsCoords
},
toggleSettings() {
this.showDetectSettings = !this.showDetectSettings
f7.utils.nextFrame(() => {
@@ -672,13 +640,13 @@
})
},
startMove() {
this.canvasMoving = true
canvasMoving = true
},
endMove() {
this.canvasMoving = false
canvasMoving = false
},
makeMove(event) {
if (this.canvasMoving) {
if (canvasMoving) {
this.canvasOffset.x += event.movementX
this.canvasOffset.y += event.movementY
this.selectChip("redraw")
@@ -704,11 +672,11 @@
},
zoomToSelected() {
const imCanvas = this.$refs.image_cvs
const boxCoords = this.box2cvs(this.resultData.detections[this.selectedChip])[0]
const boxWidth = boxCoords.cvsRight - boxCoords.cvsLeft
const boxHeight = boxCoords.cvsBottom - boxCoords.cvsTop
const boxMidX = (boxCoords.cvsRight + boxCoords.cvsLeft ) / 2
const boxMidY = (boxCoords.cvsBottom + boxCoords.cvsTop ) / 2
const boxCoords = this.resultData.detections[this.selectedChip].box.getBoxes('point', this.imageView, imCanvas)
const boxWidth = boxCoords[1].right - boxCoords[1].left
const boxHeight = boxCoords[1].bottom - boxCoords[1].top
const boxMidX = (boxCoords[1].right + boxCoords[1].left ) / 2
const boxMidY = (boxCoords[1].bottom + boxCoords[1].top ) / 2
const zoomFactor = Math.min(imCanvas.width / boxWidth * .9, imCanvas.height / boxHeight * .9, 8)
this.canvasZoom = zoomFactor
this.canvasOffset.x = -(boxMidX * zoomFactor) + imCanvas.width / 2

View File

@@ -46,11 +46,11 @@ export default {
let rawBoxes = []
let rawScores = []
for (var i = 0; i < rawRes.length; i++) {
var getScores = rawRes[i].slice(4)
for (let i = 0; i < rawRes.length; i++) {
const getScores = rawRes[i].slice(4)
if (getScores.every( s => s < .05)) { continue }
var getBox = rawRes[i].slice(0,4)
var boxCalc = [
const getBox = rawRes[i].slice(0,4)
const boxCalc = [
(getBox[0] - (getBox[2] / 2)) / modelWidth,
(getBox[1] - (getBox[3] / 2)) / modelHeight,
(getBox[0] + (getBox[2] / 2)) / modelWidth,
@@ -69,7 +69,7 @@ export default {
let boxes_data = []
let scores_data = []
let classes_data = []
for (var c = 0; c < outputSize - 4; c++) {
for (let c = 0; c < outputSize - 4; c++) {
structureScores = rawScores.map(x => x[c])
tScores = tf.tensor1d(structureScores)
resBoxes = await tf.image.nonMaxSuppressionAsync(tBoxes,tScores,10,0.5,.05)
@@ -77,7 +77,7 @@ export default {
tf.dispose(resBoxes)
if (validBoxes) {
boxes_data.push(...rawBoxes.filter( (_, idx) => validBoxes.includes(idx)))
var outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
let outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
scores_data.push(...outputScores)
classes_data.push(...outputScores.fill(c))
}
@@ -88,11 +88,11 @@ export default {
tf.dispose(tScores)
tf.dispose(tRes)
const valid_detections_data = classes_data.length
var output = {
const output = {
detections: []
}
for (var i =0; i < valid_detections_data; i++) {
var [dLeft, dTop, dRight, dBottom] = boxes_data[i]
for (let i =0; i < valid_detections_data; i++) {
const [dLeft, dTop, dRight, dBottom] = boxes_data[i]
output.detections.push({
"top": dTop,
"left": dLeft,
@@ -110,9 +110,9 @@ export default {
return output || { detections: [] }
},
getRemoteLabels() {
var self = this
var modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detectors`
var xhr = new XMLHttpRequest()
let self = this
const modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detectors`
let xhr = new XMLHttpRequest()
xhr.open("GET", modelURL)
xhr.setRequestHeader('Content-Type', 'application/json')
xhr.timeout = 10000
@@ -124,8 +124,8 @@ export default {
f7.dialog.alert(`ALVINN has encountered an error: ${errorResponse.error}`)
return
}
var detectors = JSON.parse(xhr.response).detectors
var findLabel = detectors
const detectors = JSON.parse(xhr.response).detectors
let findLabel = detectors
.find( d => { return d.name == self.detectorName } )?.labels
.filter( l => { return l != "" } ).sort()
.map( l => { return {'name': l, 'detect': true} } )
@@ -139,9 +139,9 @@ export default {
xhr.send()
},
remoteDetect() {
var self = this
var modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detect`
var xhr = new XMLHttpRequest()
let self = this
const modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detect`
let xhr = new XMLHttpRequest()
xhr.open("POST", modelURL)
xhr.timeout = 10000
xhr.ontimeout = this.remoteTimeout
@@ -158,7 +158,7 @@ export default {
self.uploadDirty = true
}
var doodsData = {
const doodsData = {
"detector_name": this.detectorName,
"detect": {
"*": 1
@@ -172,8 +172,8 @@ export default {
this.detecting = false
f7.dialog.alert('No connection to remote ALVINN instance. Please check app settings.')
},
async videoFrameDetect (vidData) {
await this.loadModel(this.miniLocation)
async videoFrameDetect (vidData, miniModel) {
await this.loadModel(miniModel)
const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
const imCanvas = this.$refs.image_cvs
const imageCtx = imCanvas.getContext("2d")
@@ -182,8 +182,7 @@ export default {
imCanvas.width = imCanvas.clientWidth
imCanvas.height = imCanvas.clientHeight
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
var imgWidth
var imgHeight
let imgWidth, imgHeight
const imgAspect = vidData.width / vidData.height
const rendAspect = imCanvas.width / imCanvas.height
if (imgAspect >= rendAspect) {
@@ -204,7 +203,7 @@ export default {
let rawCoords = []
if (rawRes) {
for (var i = 0; i < rawRes.length; i++) {
for (let i = 0; i < rawRes.length; i++) {
let getScores = rawRes[i].slice(4)
if (getScores.some( s => s > .5)) {
let foundTarget = rawRes[i].slice(0,2)
@@ -214,7 +213,7 @@ export default {
}
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
for (var coord of rawCoords) {
for (let coord of rawCoords) {
console.log(`x: ${coord[0]}, y: ${coord[1]}`)
let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / modelWidth) * imgWidth -5
let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / modelHeight) * imgHeight -5

View File

@@ -91,7 +91,7 @@
computed: {
otherIp () {
let filteredIps = {}
for (var oldIp in this.serverSettings.previous) {
for (let oldIp in this.serverSettings.previous) {
if (oldIp != this.serverSettings.address) {
filteredIps[oldIp] = this.serverSettings.previous[oldIp]
}
@@ -109,12 +109,12 @@
}
},
created () {
var loadServerSettings = localStorage.getItem('serverSettings')
const loadServerSettings = localStorage.getItem('serverSettings')
if (loadServerSettings) this.serverSettings = JSON.parse(loadServerSettings)
if (!this.serverSettings.previous) this.serverSettings.previous = {}
var loadThemeSettings = localStorage.getItem('themeSettings')
const loadThemeSettings = localStorage.getItem('themeSettings')
if (loadThemeSettings) this.themeSettings = JSON.parse(loadThemeSettings)
var loadOtherSettings = localStorage.getItem('otherSettings')
const loadOtherSettings = localStorage.getItem('otherSettings')
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
},
methods: {
@@ -136,7 +136,7 @@
)
saveSetting.then(
() => {
var toast = f7.toast.create({
const toast = f7.toast.create({
text: 'Settings saved',
closeTimeout: 2000
})
@@ -144,7 +144,7 @@
this.isDirty = false;
},
() => {
var toast = f7.toast.create({
const toast = f7.toast.create({
text: 'ERROR: No settings saved',
closeTimeout: 2000
})

View File

@@ -63,7 +63,7 @@
return store()
},
created () {
var loadOtherSettings = localStorage.getItem('otherSettings')
const loadOtherSettings = localStorage.getItem('otherSettings')
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
fetch(`${this.isCordova ? 'https://localhost' : '.'}/models/thorax/descript.json`)
.then((mod) => { return mod.json() })

View File

@@ -5,8 +5,8 @@ export default {
newUid (length) {
const uidLength = length || 16
const uidChars = 'abcdefghijklmnopqrstuvwxyz0123456789'
var uid = []
for (var i = 0; i < uidLength; i++) {
let uid = []
for (let i = 0; i < uidLength; i++) {
uid.push(uidChars.charAt(Math.floor(Math.random() * ((i < 4) ? 26 : 36))))
}
return uid.join('')
@@ -14,24 +14,23 @@ export default {
uploadData (imagePayload, classPayload, prevUid) {
let uploadImage = new Promise (resolve => {
const dataUid = prevUid || this.newUid(16)
var byteChars = window.atob(imagePayload)
var byteArrays = []
var len = byteChars.length
let byteChars = window.atob(imagePayload)
let byteArrays = []
for (var offset = 0; offset < len; offset += 1024) {
var slice = byteChars.slice(offset, offset + 1024)
var byteNumbers = new Array(slice.length)
for (var i = 0; i < slice.length; i++) {
for (let offset = 0; offset < byteChars.length; offset += 1024) {
let slice = byteChars.slice(offset, offset + 1024)
let byteNumbers = new Array(slice.length)
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i)
}
var byteArray = new Uint8Array(byteNumbers)
let byteArray = new Uint8Array(byteNumbers)
byteArrays.push(byteArray)
}
var imageBlob = new Blob(byteArrays, {type: 'image/jpeg'})
const imageBlob = new Blob(byteArrays, {type: 'image/jpeg'})
var xhrJpg = new XMLHttpRequest()
var uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.jpeg`
let xhrJpg = new XMLHttpRequest()
let uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.jpeg`
xhrJpg.open("PUT", uploadUrl)
xhrJpg.setRequestHeader('Content-Type', 'image/jpeg')
xhrJpg.setRequestHeader('X-Method-Override', 'PUT')
@@ -39,8 +38,8 @@ export default {
xhrJpg.setRequestHeader("Authorization", "Basic " + btoa("LKBm3H6JdSaywyg:"))
xhrJpg.send(imageBlob)
var xhrTxt = new XMLHttpRequest()
var uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.txt`
let xhrTxt = new XMLHttpRequest()
uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.txt`
xhrTxt.open("PUT", uploadUrl)
xhrTxt.setRequestHeader('Content-Type', 'text/plain')
xhrTxt.setRequestHeader('X-Method-Override', 'PUT')
@@ -51,7 +50,7 @@ export default {
resolve(dataUid)
})
return uploadImage.then((newUid) => {
var toast = f7.toast.create({
const toast = f7.toast.create({
text: 'Detections Uploaded: thank you.',
closeTimeout: 2000
})

View File

@@ -25,7 +25,6 @@ export default {
moveTouch(event) {
switch (event.touches.length) {
case 1:
console.log(event)
this.canvasOffset.x += event.touches[0].clientX - this.touchPrevious.x
this.canvasOffset.y += event.touches[0].clientY - this.touchPrevious.y
this.touchPrevious = {x: event.touches[0].clientX, y: event.touches[0].clientY}