Compare commits

1 commit: main...47363b9a4c

| Author | SHA1 | Date |
|---|---|---|
|  | 47363b9a4c |  |
@@ -75,18 +75,14 @@ async function localDetect(imageData) {
console.time('sw: post-process')
const outputSize = res.shape[1]
const output = {
detections: []
}
let rawBoxes = []
let rawScores = []
let getScores, getBox, boxCalc

for (let i = 0; i < rawRes.length; i++) {
getScores = rawRes[i].slice(4)
for (var i = 0; i < rawRes.length; i++) {
var getScores = rawRes[i].slice(4)
if (getScores.every( s => s < .05)) { continue }
getBox = rawRes[i].slice(0,4)
boxCalc = [
var getBox = rawRes[i].slice(0,4)
var boxCalc = [
(getBox[0] - (getBox[2] / 2)) / modelWidth,
(getBox[1] - (getBox[3] / 2)) / modelHeight,
(getBox[0] + (getBox[2] / 2)) / modelWidth,
@@ -105,7 +101,7 @@ async function localDetect(imageData) {
let boxes_data = []
let scores_data = []
let classes_data = []
for (let c = 0; c < outputSize - 4; c++) {
for (var c = 0; c < outputSize - 4; c++) {
structureScores = rawScores.map(x => x[c])
tScores = tf.tensor1d(structureScores)
resBoxes = await tf.image.nonMaxSuppressionAsync(tBoxes,tScores,10,0.5,.05)
@@ -113,7 +109,7 @@ async function localDetect(imageData) {
tf.dispose(resBoxes)
if (validBoxes) {
boxes_data.push(...rawBoxes.filter( (_, idx) => validBoxes.includes(idx)))
let outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
var outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
scores_data.push(...outputScores)
classes_data.push(...outputScores.fill(c))
}
@@ -123,15 +119,18 @@ async function localDetect(imageData) {
tf.dispose(tBoxes)
tf.dispose(tScores)
tf.dispose(tRes)
tf.dispose(resBoxes)
const valid_detections_data = classes_data.length
for (let i =0; i < valid_detections_data; i++) {
let [dLeft, dTop, dRight, dBottom] = boxes_data[i]
var output = {
detections: []
}
for (var i =0; i < valid_detections_data; i++) {
var [dLeft, dTop, dRight, dBottom] = boxes_data[i]
output.detections.push({
"top": dTop,
"left": dLeft,
"bottom": dBottom,
"right": dRight,
// "label": this.detectorLabels[classes_data[i]].name,
"label": classes_data[i],
"confidence": scores_data[i] * 100
})
@@ -156,7 +155,7 @@ async function videoFrame (vidData) {
const rawRes = tf.transpose(res,[0,2,1]).arraySync()[0]

if (rawRes) {
for (let i = 0; i < rawRes.length; i++) {
for (var i = 0; i < rawRes.length; i++) {
let getScores = rawRes[i].slice(4)
if (getScores.some( s => s > .5)) {
let foundTarget = rawRes[i].slice(0,2)

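The hunks above rework the detection worker's YOLO-style post-processing: each raw output row of `[cx, cy, w, h, class scores...]` is converted to a normalized corner box, then pruned per class with `tf.image.nonMaxSuppressionAsync`. A minimal, self-contained sketch of that pattern (not the worker code itself — `postProcess`, its thresholds, and the plain tfjs import are illustrative):

```js
import * as tf from '@tensorflow/tfjs'

// rawRes: array of rows [cx, cy, w, h, score_0 ... score_{n-1}] in model pixels.
async function postProcess (rawRes, modelWidth, modelHeight) {
  const rawBoxes = []
  const rawScores = []
  for (let i = 0; i < rawRes.length; i++) {
    const scores = rawRes[i].slice(4)
    if (scores.every(s => s < 0.05)) continue      // drop clear background rows early
    const [cx, cy, w, h] = rawRes[i].slice(0, 4)
    rawBoxes.push([                                 // centre/size -> normalized corners
      (cx - w / 2) / modelWidth,
      (cy - h / 2) / modelHeight,
      (cx + w / 2) / modelWidth,
      (cy + h / 2) / modelHeight
    ])
    rawScores.push(scores)
  }

  const output = { detections: [] }
  if (rawBoxes.length === 0) return output

  const tBoxes = tf.tensor2d(rawBoxes)
  const numClasses = rawScores[0].length
  for (let c = 0; c < numClasses; c++) {
    const classScores = rawScores.map(r => r[c])
    const tScores = tf.tensor1d(classScores)
    // keep at most 10 boxes per class, IoU threshold 0.5, score threshold 0.05
    const resBoxes = await tf.image.nonMaxSuppressionAsync(tBoxes, tScores, 10, 0.5, 0.05)
    const keep = await resBoxes.array()
    tf.dispose([tScores, resBoxes])
    for (const idx of keep) {
      const [left, top, right, bottom] = rawBoxes[idx]
      output.detections.push({ top, left, bottom, right, label: c, confidence: classScores[idx] * 100 })
    }
  }
  tf.dispose(tBoxes)
  return output
}
```

The boxes are kept in the diff's `[left, top, right, bottom]` order rather than TF.js's documented `[y1, x1, y2, x2]`; IoU is unchanged by a consistent axis swap, so non-max suppression still behaves the same.
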
@@ -74,14 +74,6 @@
}
},
async created () {
document.addEventListener('keydown', e => {
if (e.code == 'KeyR') {
console.log(f7.views.main.router.history)
}
if (e.code == 'KeyB') {
f7.views.main.router.back()
}
})
if (!window.cordova) {
const confText = await fetch('./conf/conf.yaml')
.then((mod) => { return mod.text() })

@@ -17,9 +17,6 @@
<path v-else-if="icon == 'head'" d="M194-80v-395h80v315h280v-193l105-105q29-29 45-65t16-77q0-40-16.5-76T659-741l-25-26-127 127H347l-43 43-57-56 67-67h160l160-160 82 82q40 40 62 90.5T800-600q0 57-22 107.5T716-402l-82 82v240H194Zm197-187L183-475q-11-11-17-26t-6-31q0-16 6-30.5t17-25.5l84-85 124 123q28 28 43.5 64.5T450-409q0 40-15 76.5T391-267Z"/>
<path v-else-if="icon == 'photo_sample'" d="M240-80q-33 0-56.5-23.5T160-160v-640q0-33 23.5-56.5T240-880h480q33 0 56.5 23.5T800-800v640q0 33-23.5 56.5T720-80H240Zm0-80h480v-640h-80v280l-100-60-100 60v-280H240v640Zm40-80h400L545-420 440-280l-65-87-95 127Zm-40 80v-640 640Zm200-360 100-60 100 60-100-60-100 60Z"/>
<path v-else-if="icon == 'reset_slide'" d="M520-330v-60h160v60H520Zm60 210v-50h-60v-60h60v-50h60v160h-60Zm100-50v-60h160v60H680Zm40-110v-160h60v50h60v60h-60v50h-60Zm111-280h-83q-26-88-99-144t-169-56q-117 0-198.5 81.5T200-480q0 72 32.5 132t87.5 98v-110h80v240H160v-80h94q-62-50-98-122.5T120-480q0-75 28.5-140.5t77-114q48.5-48.5 114-77T480-840q129 0 226.5 79.5T831-560Z"/>
<path v-else-if="icon == 'zoom_to'" d="M440-40v-167l-44 43-56-56 140-140 140 140-56 56-44-43v167h-80ZM220-340l-56-56 43-44H40v-80h167l-43-44 56-56 140 140-140 140Zm520 0L600-480l140-140 56 56-43 44h167v80H753l43 44-56 56Zm-260-80q-25 0-42.5-17.5T420-480q0-25 17.5-42.5T480-540q25 0 42.5 17.5T540-480q0 25-17.5 42.5T480-420Zm0-180L340-740l56-56 44 43v-167h80v167l44-43 56 56-140 140Z"/>
<path v-else-if="icon == 'reset_zoom'" d="M480-320v-100q0-25 17.5-42.5T540-480h100v60H540v100h-60Zm60 240q-25 0-42.5-17.5T480-140v-100h60v100h100v60H540Zm280-240v-100H720v-60h100q25 0 42.5 17.5T880-420v100h-60ZM720-80v-60h100v-100h60v100q0 25-17.5 42.5T820-80H720Zm111-480h-83q-26-88-99-144t-169-56q-117 0-198.5 81.5T200-480q0 72 32.5 132t87.5 98v-110h80v240H160v-80h94q-62-50-98-122.5T120-480q0-75 28.5-140.5t77-114q48.5-48.5 114-77T480-840q129 0 226.5 79.5T831-560Z"/>
<path v-else-if="icon == 'clipboard'" d="M200-120q-33 0-56.5-23.5T120-200v-560q0-33 23.5-56.5T200-840h167q11-35 43-57.5t70-22.5q40 0 71.5 22.5T594-840h166q33 0 56.5 23.5T840-760v560q0 33-23.5 56.5T760-120H200Zm0-80h560v-560h-80v120H280v-120h-80v560Zm280-560q17 0 28.5-11.5T520-800q0-17-11.5-28.5T480-840q-17 0-28.5 11.5T440-800q0 17 11.5 28.5T480-760Z"/>
</svg>
</template>

@@ -47,10 +44,7 @@
'limbs',
'head',
'photo_sample',
'reset_slide',
'zoom_to',
'reset_zoom',
'clipboard'
'reset_slide'
]
return iconList.includes(value)
}

@@ -150,7 +150,8 @@
.structure-info {
position: absolute;
z-index: 3;
color: #0f206c;
color: rgb(15, 32, 108);
background: yellow;
border-radius: 100%;
}

@@ -18,7 +18,7 @@
<meta name="msapplication-tap-highlight" content="no">
<title>ALVINN</title>
<% if (TARGET === 'web') { %>
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black-translucent">
<link rel="apple-touch-icon" href="icons/apple-touch-icon.png">
<link rel="icon" href="icons/favicon.png">

@@ -1,157 +0,0 @@
class Coordinate {
constructor(x, y) {
this.x = x
this.y = y
}

toRefFrame(...frameArgs) {
if (frameArgs.length == 0) {
return {x: this.x, y: this.y}
}
let outFrames = []
//Get Coordinates in Image Reference Frame
if (frameArgs[0].tagName == 'IMG' && frameArgs[0].width && frameArgs[0].height) {
outFrames.push({
x: this.x * frameArgs[0].width,
y: this.y * frameArgs[0].height
})
} else {
throw new Error('Coordinate: invalid reference frame for frameType: Image')
}
//Get Coordinates in Canvas Reference Frame
if (frameArgs[1]) {
if (frameArgs[1].tagName == 'CANVAS' && frameArgs[1].width && frameArgs[1].height) {
let imgWidth
let imgHeight
const imgAspect = frameArgs[0].width / frameArgs[0].height
const rendAspect = frameArgs[1].width / frameArgs[1].height
if (imgAspect >= rendAspect) {
imgWidth = frameArgs[1].width
imgHeight = frameArgs[1].width / imgAspect
} else {
imgWidth = frameArgs[1].height * imgAspect
imgHeight = frameArgs[1].height
}
outFrames.push({
x: (frameArgs[1].width - imgWidth) / 2 + this.x * imgWidth,
y: (frameArgs[1].height - imgHeight) / 2 + this.y * imgHeight
})
} else {
throw new Error('Coordinate: invalid reference frame for frameType: Canvas')
}
}
//Get Coordinates in Screen Reference Frame
if (frameArgs[2]) {
if (frameArgs[2].zoom && frameArgs[2].offset && frameArgs[2].offset.x !== undefined && frameArgs[2].offset.y !== undefined) {
outFrames.push({
x: outFrames[1].x * frameArgs[2].zoom + frameArgs[2].offset.x,
y: outFrames[1].y * frameArgs[2].zoom + frameArgs[2].offset.y
})
} else {
throw new Error('Coordinate: invalid reference frame for frameType: Screen')
}
}

return outFrames
}

toString() {
return `(x: ${this.x}, y: ${this.y})`
}
}

export class StructureBox {
constructor(top, left, bottom, right) {
this.topLeft = new Coordinate(left, top)
this.bottomRight = new Coordinate(right, bottom)
}

getBoxes(boxType, ...frameArgs) {
let lowerH, lowerV, calcSide
switch (boxType) {
case 'point':
lowerH = 'right'
lowerV = 'bottom'
break
case 'side':
lowerH = 'width'
lowerV = 'height'
calcSide = true
break
default:
throw new Error(`StructureBox: invalid boxType - ${boxType}`)
}
if (frameArgs.length == 0) {
return {
left: this.topLeft.x,
top: this.topLeft.y,
[lowerH]: this.bottomRight.x - ((calcSide) ? this.topLeft.x : 0),
[lowerV]: this.bottomRight.y - ((calcSide) ? this.topLeft.y : 0)
}
}
const tL = this.topLeft.toRefFrame(...frameArgs)
const bR = this.bottomRight.toRefFrame(...frameArgs)
let outBoxes = []
tL.forEach((cd, i) => {
outBoxes.push({
left: cd.x,
top: cd.y,
[lowerH]: bR[i].x - ((calcSide) ? cd.x : 0),
[lowerV]: bR[i].y - ((calcSide) ? cd.y : 0)
})
})
return outBoxes
}
}

export class Structure {
constructor(structResult) {
this.label = structResult.label
this.confidence = structResult.confidence
this.box = new StructureBox(
structResult.top,
structResult.left,
structResult.bottom,
structResult.right
)
this.deleted = false
this.index = -1
this.passThreshold = true
this.searched = false
}

get resultIndex() {
return this.index
}

set resultIndex(newIdx) {
this.index = newIdx
}

get isDeleted() {
return this.deleted
}

set isDeleted(del) {
this.deleted = !!del
}

get isSearched() {
return this.searched
}

set isSearched(ser) {
this.searched = !!ser
}

get aboveThreshold() {
return this.passThreshold
}

setThreshold(level) {
if (typeof level != 'number') {
throw new Error(`Structure: invalid threshold level ${level}`)
}
this.passThreshold = this.confidence >= level
}
}

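The removed structures.js module above wrapped raw detections in Coordinate / StructureBox / Structure objects; `getBoxes` maps one normalized box through up to three reference frames (image pixels, letter-boxed canvas, zoomed/offset screen), which is what the detect page's `selectChip` relied on. A short usage sketch under assumed element refs (the selectors, label, and zoom values here are illustrative):

```js
import { Structure } from '../js/structures'

// One detection as the worker reports it: edges normalized to [0, 1].
const s = new Structure({ label: 'heart', confidence: 87, top: 0.20, left: 0.35, bottom: 0.55, right: 0.60 })

const img = document.querySelector('#target-image')   // IMG element: frame 0 (image pixels)
const cvs = document.querySelector('#im-draw')         // CANVAS element: frame 1 (letter-boxed canvas)
const view = { zoom: 2, offset: { x: -40, y: -25 } }   // frame 2 (current screen zoom/offset)

// 'side' returns {left, top, width, height}; 'point' returns {left, top, right, bottom}.
const [imgBox, cvsBox, screenBox] = s.box.getBoxes('side', img, cvs, view)
```
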
@@ -3,11 +3,11 @@ import { f7 } from 'framework7-vue'
export default {
methods: {
async openCamera(imContain) {
let cameraLoaded = false
var cameraLoaded = false
const devicesList = await navigator.mediaDevices.enumerateDevices()
let videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
if (videoDeviceAvailable) {
let vidConstraint = {
this.videoDeviceAvailable = devicesList.some( d => d.kind == "videoinput")
if (this.videoDeviceAvailable) {
var vidConstraint = {
video: {
width: {
ideal: imContain.offsetWidth
@@ -41,24 +41,24 @@ export default {
tempCtx.drawImage(vidViewer, 0, 0)
this.getImage(tempCVS.toDataURL())
},
async videoFrameDetectWorker (vidData, vidWorker) {
async videoFrameDetectWorker (vidData) {
const startDetection = () => {
createImageBitmap(vidData).then(imVideoFrame => {
vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
this.vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
})
}
vidData.addEventListener('resize',startDetection,{once: true})
vidWorker.onmessage = (eVid) => {
this.vidWorker.onmessage = (eVid) => {
if (eVid.data.error) {
console.log(eVid.data.message)
f7.dialog.alert(`ALVINN AI model error: ${eVid.data.message}`)
} else if (this.videoAvailable) {
createImageBitmap(vidData).then(imVideoFrame => {
vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
this.vidWorker.postMessage({call: 'videoFrame', image: imVideoFrame}, [imVideoFrame])
})
if (eVid.data.coords) {
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
for (let coord of eVid.data.coords) {
for (var coord of eVid.data.coords) {
let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / eVid.data.modelWidth) * imgWidth - 10
let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / eVid.data.modelHeight) * imgHeight - 10
console.debug(`cx: ${pointX}, cy: ${pointY}`)
@@ -72,7 +72,8 @@ export default {
const imCanvas = this.$refs.image_cvs
const imageCtx = imCanvas.getContext("2d")
const target = this.$refs.target_image
let imgWidth, imgHeight
var imgWidth
var imgHeight
f7.utils.nextFrame(() => {
imCanvas.width = imCanvas.clientWidth
imCanvas.height = imCanvas.clientHeight

@@ -56,7 +56,7 @@
},
computed: {
commentText () {
let text = f7.textEditor.get('.comment-editor').getValue()
var text = f7.textEditor.get('.comment-editor').getValue()
if (this.userEmail) {
text += `\\n\\nSubmitted by: ${this.userEmail}`
}
@@ -65,9 +65,9 @@
},
methods: {
sendFeedback () {
let self = this
const issueURL = `https://gitea.azgeorgis.net/api/v1/repos/Georgi_Lab/ALVINN_f7/issues?access_token=9af8ae15b1ee5a98afcb3083bb488e4cf3c683af`
let xhr = new XMLHttpRequest()
var self = this
var issueURL = `https://gitea.azgeorgis.net/api/v1/repos/Georgi_Lab/ALVINN_f7/issues?access_token=9af8ae15b1ee5a98afcb3083bb488e4cf3c683af`
var xhr = new XMLHttpRequest()
xhr.open("POST", issueURL)
xhr.setRequestHeader('Content-Type', 'application/json')
xhr.setRequestHeader('accept', 'application/json')

@@ -1,36 +1,23 @@
<template>
<f7-page name="detect" :id="detectorName + '-detect-page'" @wheel="(e = $event) => e.preventDefault()" @touchmove="(e = $event) => e.preventDefault()">
<f7-page name="detect" :id="detectorName + '-detect-page'">
<!-- Top Navbar -->
<f7-navbar :sliding="false" :back-link="true" back-link-url="/" back-link-force>
<f7-nav-title sliding>{{ regionTitle }}</f7-nav-title>
<f7-nav-title sliding>{{ regions[activeRegion] }}</f7-nav-title>
<f7-nav-right>
<f7-link v-if="!isCordova" :icon-only="true" tooltip="Fullscreen" :icon-f7="isFullscreen ? 'viewfinder_circle_fill' : 'viewfinder'" @click="toggleFullscreen"></f7-link>
<f7-link :icon-only="true" tooltip="ALVINN help" icon-f7="question_circle_fill" href="/help/"></f7-link>
</f7-nav-right>
</f7-navbar>
<f7-block class="detect-grid">
<!--<div style="position: absolute;">{{ debugInfo ? JSON.stringify(debugInfo) : "No Info Available" }}</div>-->
<div class="image-container" ref="image_container">
<SvgIcon v-if="!imageView.src && !videoAvailable" :icon="f7route.params.region" fill-color="var(--avn-theme-color)"/>
<div class="vid-container" :style="`display: ${videoAvailable ? 'block' : 'none'}; position: absolute; width: 100%; height: 100%;`">
<video id="vid-view" ref="vid_viewer" :srcObject="cameraStream" :autoPlay="true" style="width: 100%; height: 100%"></video>
<f7-button @click="captureVidFrame()" style="position: absolute; bottom: 32px; left: 50%; transform: translateX(-50%); z-index: 3;" fill large>Capture</f7-button>
</div>
<canvas
id="im-draw"
ref="image_cvs"
@wheel="spinWheel($event)"
@mousedown.middle="startMove($event)"
@mousemove="makeMove($event)"
@mouseup.middle="endMove($event)"
@touchstart="startTouch($event)"
@touchend="endTouch($event)"
@touchmove="moveTouch($event)"
@click="structureClick"
:style="`display: ${(imageLoaded || videoAvailable) ? 'block' : 'none'}; flex: 1 1 0%; max-width: 100%; max-height: 100%; min-width: 0; min-height: 0; background-size: contain; background-position: center; background-repeat: no-repeat; z-index: 2;`"
></canvas>
<f7-link v-if="getInfoUrl && (selectedChip > -1) && showResults[selectedChip]"
:style="`left: ${infoLinkPos.x}px; top: ${infoLinkPos.y}px; transform: translate(-50%,-50%); background: hsla(${showResults[selectedChip].confidence / 100 * 120}deg, 100%, 50%, .5)`"
<canvas id="im-draw" ref="image_cvs" @click="structureClick" :style="`display: ${(imageLoaded || videoAvailable) ? 'block' : 'none'}; flex: 1 1 0%; max-width: 100%; max-height: 100%; min-width: 0; min-height: 0; background-size: contain; background-position: center; background-repeat: no-repeat; z-index: 2;`" />
<f7-link v-if="getInfoUrl && (selectedChip > -1)"
:style="`left: ${infoLinkPos.x}px; top: ${infoLinkPos.y}px; transform: translate(calc(-50% - ${infoLinkPos.adj}px),calc(-50% - ${infoLinkPos.adj}px));`"
class="structure-info"
:icon-only="true"
icon-f7="info"
@@ -74,19 +61,16 @@
</f7-button>
</div>
<f7-segmented class="image-menu" raised>
<f7-button popover-open="#region-popover">
<RegionIcon :region="activeRegion" :iconSet="getIconSet" />
</f7-button>
<f7-button v-if="!videoAvailable" :class="(!modelLoading) ? '' : 'disabled'" popover-open="#capture-popover">
<SvgIcon icon="camera_add"/>
</f7-button>
<f7-button v-if="videoAvailable" @click="closeCamera()">
<SvgIcon icon="no_photography"/>
</f7-button>
<f7-button v-if="!structureZoomed && selectedChip >= 0" style="height: auto; width: auto;" popover-close="#image-popover" @click="zoomToSelected()">
<SvgIcon icon="zoom_to" />
</f7-button>
<f7-button v-else :class="(canvasZoom != 1) ? '' : 'disabled'" style="height: auto; width: auto;" popover-close="#image-popover" @click="resetZoom()">
<SvgIcon icon="reset_zoom" />
</f7-button>
<f7-button @click="toggleSettings()" :class="(imageLoaded) ? '' : 'disabled'">
<f7-button @click="() => showDetectSettings = !showDetectSettings" :class="(imageLoaded) ? '' : 'disabled'">
<SvgIcon icon="visibility"/>
<f7-badge v-if="numResults && (showResults.length != numResults)" color="red" style="position: absolute; right: 15%; top: 15%;">{{ showResults.length - numResults }}</f7-badge>
</f7-button>
@@ -109,6 +93,23 @@
</f7-page>
</f7-panel>

<f7-popover id="region-popover" class="popover-button-menu">
<f7-segmented raised class="segment-button-menu">
<f7-button :class="(getRegions.includes('thorax')) ? '' : ' disabled'" style="height: auto; width: auto;" href="/detect/thorax/" popover-close="#region-popover">
<RegionIcon :region="0" :iconSet="getIconSet" />
</f7-button>
<f7-button :class="(getRegions.includes('abdomen')) ? '' : ' disabled'" style="height: auto; width: auto;" href="/detect/abdomen/" popover-close="#region-popover">
<RegionIcon :region="1" :iconSet="getIconSet" />
</f7-button>
<f7-button :class="(getRegions.includes('limbs')) ? '' : ' disabled'" style="height: auto; width: auto;" href="/detect/limbs/" popover-close="#region-popover">
<RegionIcon :region="2" :iconSet="getIconSet" />
</f7-button>
<f7-button :class="(getRegions.includes('head')) ? '' : ' disabled'" style="height: auto; width: auto;" href="/detect/head/" popover-close="#region-popover">
<RegionIcon :region="3" :iconSet="getIconSet" />
</f7-button>
</f7-segmented>
</f7-popover>

<f7-popover id="capture-popover" class="popover-button-menu">
<f7-segmented raised class="segment-button-menu">
<f7-button style="height: auto; width: auto;" popover-close="#capture-popover" @click="selectImage('camera')">
@@ -117,9 +118,6 @@
<f7-button style="height: auto; width: auto;" popover-close="#capture-popover" @click="selectImage('file')">
<SvgIcon icon="photo_library" />
</f7-button>
<f7-button v-if="secureProtocol" style="height: auto; width: auto;" popover-close="#capture-popover" @click="selectImage('clipboard')">
<SvgIcon icon="clipboard" />
</f7-button>
<f7-button v-if="demoEnabled" style="height: auto; width: auto;" popover-close="#capture-popover" @click="selectImage('sample')">
<SvgIcon icon="photo_sample"/>
</f7-button>
@@ -141,27 +139,11 @@
import submitMixin from './submit-mixin'
import detectionMixin from './detection-mixin'
import cameraMixin from './camera-mixin'
import touchMixin from './touch-mixin'

import detectionWorker from '@/assets/detect-worker.js?worker&inline'
import { Structure, StructureBox } from '../js/structures'

const regions = ['Thorax','Abdomen/Pelvis','Limbs','Head and Neck']
let activeRegion = 4
let classesList = []
let imageLoadMode = "environment"
let serverSettings = {}
let otherSettings = {}
let modelLocation = ''
let miniLocation = ''
let reloadModel = false
let detectWorker = null
let vidWorker = null
let canvasMoving = false
let imageLocation = new StructureBox(0, 0, 1, 1)

export default {
mixins: [submitMixin, detectionMixin, cameraMixin, touchMixin],
mixins: [submitMixin, detectionMixin, cameraMixin],
props: {
f7route: Object,
},
@@ -171,28 +153,36 @@
},
data () {
return {
regions: ['Thorax','Abdomen/Pelvis','Limbs','Head and Neck'],
resultData: {},
selectedChip: -1,
activeRegion: 4,
classesList: [],
imageLoaded: false,
imageView: new Image(),
imCvsLocation: {},
imageLoadMode: "environment",
detecting: false,
detectPanel: false,
showDetectSettings: false,
detectorName: '',
detectorLevel: 50,
detectorLabels: [],
serverSettings: {},
otherSettings: {},
isCordova: !!window.cordova,
secureProtocol: location.protocol == 'https:',
uploadUid: null,
uploadDirty: false,
modelLocation: '',
miniLocation: '',
modelLoading: true,
reloadModel: false,
videoDeviceAvailable: false,
videoAvailable: false,
cameraStream: null,
infoLinkPos: {},
canvasOffset: {x: 0, y: 0},
canvasZoom: 1,
structureZoomed: false,
debugInfo: null
detectWorker: null,
vidWorker: null
}
},
setup() {
@@ -200,44 +190,44 @@
},
created () {
let loadOtherSettings = localStorage.getItem('otherSettings')
if (loadOtherSettings) otherSettings = JSON.parse(loadOtherSettings)
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
this.detectorName = this.f7route.params.region
switch (this.detectorName) {
case 'thorax':
activeRegion = 0
this.activeRegion = 0
break;
case 'abdomen':
activeRegion = 1
this.activeRegion = 1
break;
case 'limbs':
activeRegion = 2
this.activeRegion = 2
break;
case 'head':
activeRegion = 3
this.activeRegion = 3
break;
}
let modelJ = `../models/${this.detectorName}${otherSettings.mini ? '-mini' : ''}/model.json`
let modelJ = `../models/${this.detectorName}${this.otherSettings.mini ? '-mini' : ''}/model.json`
let miniJ = `../models/${this.detectorName}-mini/model.json`
modelLocation = new URL(modelJ,import.meta.url).href
miniLocation = new URL(miniJ,import.meta.url).href
this.modelLocation = new URL(modelJ,import.meta.url).href
this.miniLocation = new URL(miniJ,import.meta.url).href
let classesJ = `../models/${this.detectorName}/classes.json`
fetch(new URL(classesJ,import.meta.url).href)
.then((mod) => { return mod.json() })
.then((classes) => {
classesList = classes
this.detectorLabels = classesList.map( l => { return {'name': l, 'detect': true} } )
this.classesList = classes
this.detectorLabels = this.classesList.map( l => { return {'name': l, 'detect': true} } )
})
const loadServerSettings = localStorage.getItem('serverSettings')
if (loadServerSettings) serverSettings = JSON.parse(loadServerSettings)
var loadServerSettings = localStorage.getItem('serverSettings')
if (loadServerSettings) this.serverSettings = JSON.parse(loadServerSettings)
},
mounted () {
if (serverSettings && serverSettings.use) {
if (this.serverSettings && this.serverSettings.use) {
this.getRemoteLabels()
this.modelLoading = false
} else {
this.modelLoading = true
if (!this.useWorkers) {
this.loadModel(modelLocation, true).then(() => {
this.loadModel(this.modelLocation, true).then(() => {
this.modelLoading = false
}).catch((e) => {
console.log(e.message)
@@ -245,8 +235,8 @@
this.modelLoading = false
})
} else {
detectWorker = new detectionWorker()
detectWorker.onmessage = (eMount) => {
this.detectWorker = new detectionWorker()
this.detectWorker.onmessage = (eMount) => {
self = this
if (eMount.data.error) {
console.log(eMount.data.message)
@@ -254,24 +244,21 @@
}
self.modelLoading = false
}
vidWorker = new detectionWorker()
vidWorker.onmessage = (eMount) => {
this.vidWorker = new detectionWorker()
this.vidWorker.onmessage = (eMount) => {
self = this
if (eMount.data.error) {
console.log(eMount.data.message)
f7.dialog.alert(`ALVINN AI nano model error: ${eMount.data.message}`)
}
}
detectWorker.postMessage({call: 'loadModel', weights: modelLocation, preload: true})
vidWorker.postMessage({call: 'loadModel', weights: miniLocation, preload: true})
this.detectWorker.postMessage({call: 'loadModel', weights: this.modelLocation, preload: true})
this.vidWorker.postMessage({call: 'loadModel', weights: this.miniLocation, preload: true})
}
}
window.onresize = (e) => { if (this.$refs.image_cvs) this.selectChip('redraw') }
},
computed: {
regionTitle () {
return regions[activeRegion]
},
message () {
if (this.modelLoading) {
return "Preparing ALVINN..."
@@ -284,17 +271,17 @@
}
},
showResults () {
let filteredResults = this.resultData.detections
var filteredResults = this.resultData.detections
if (!filteredResults) return []

const allSelect = this.detectorLabels.every( s => { return s.detect } )
const selectedLabels = this.detectorLabels
var allSelect = this.detectorLabels.every( s => { return s.detect } )
var selectedLabels = this.detectorLabels
.filter( l => { return l.detect })
.map( l => { return l.name })
filteredResults.forEach( (d, i) => {
d.resultIndex = i
d.setThreshold(this.detectorLevel)
d.isSearched = allSelect || selectedLabels.includes(d.label)
filteredResults[i].resultIndex = i
filteredResults[i].aboveThreshold = d.confidence >= this.detectorLevel
filteredResults[i].isSearched = allSelect || selectedLabels.includes(d.label)
})

if (!filteredResults.some( s => s.resultIndex == this.selectedChip && s.aboveThreshold && s.isSearched && !s.isDeleted)) {
@@ -315,13 +302,13 @@
}
},
demoEnabled () {
return otherSettings.demo || this.demoMode
return this.otherSettings.demo || this.demoMode
},
infoLinkTarget () {
if (!this.getInfoUrl) return ''
let structure = this.showResults.find( r => r.resultIndex == this.selectedChip)
return structure ? this.getInfoUrl + structure.label.replaceAll(' ','_') : ''
},
}
},
methods: {
chipGradient (confVal) {
@@ -329,8 +316,8 @@
return `--chip-media-gradient: conic-gradient(from ${270 - (confFactor * 360 / 2)}deg, hsl(${confFactor * 120}deg, 100%, 50%) ${confFactor}turn, hsl(${confFactor * 120}deg, 50%, 66%) ${confFactor}turn)`
},
async setData () {
if (detectWorker) {
detectWorker.onmessage = (eDetect) => {
if (this.detectWorker) {
this.detectWorker.onmessage = (eDetect) => {
self = this
if (eDetect.data.error) {
self.detecting = false
@@ -339,46 +326,41 @@
f7.dialog.alert(`ALVINN structure finding error: ${eDetect.data.message}`)
} else if (eDetect.data.success == 'detection') {
self.detecting = false
self.resultData = {detections: []}
eDetect.data.detections.detections.forEach((d) => {
d.label = self.detectorLabels[d.label].name
let detectedStructure = new Structure(d)
self.resultData.detections.push(detectedStructure)
})
self.resultData = eDetect.data.detections
if (self.resultData) {
self.resultData.detections.map(d => {d.label = self.detectorLabels[d.label].name})
}
self.uploadDirty = true
} else if (eDetect.data.success == 'model') {
reloadModel = false
self.reloadModel = false
loadSuccess()
}
f7.utils.nextFrame(() => {
this.selectChip("redraw")
})
}
}

let loadSuccess = null
let loadFailure = null
let modelReloading = null
if (!this.useWorkers && reloadModel) {
await this.loadModel(modelLocation)
reloadModel = false
if (!this.useWorkers && this.reloadModel) {
await this.loadModel(this.modelLocation)
this.reloadModel = false
} else {
modelReloading = new Promise((res, rej) => {
loadSuccess = res
loadFailure = rej
if (reloadModel) {
detectWorker.postMessage({call: 'loadModel', weights: modelLocation})
if (this.reloadModel) {
this.detectWorker.postMessage({call: 'loadModel', weights: this.modelLocation})
} else {
loadSuccess()
}
})
}

if (serverSettings && serverSettings.use) {
if (this.serverSettings && this.serverSettings.use) {
this.remoteDetect()
} else if (this.useWorkers) {
Promise.all([modelReloading,createImageBitmap(this.imageView)]).then(res => {
detectWorker.postMessage({call: 'localDetect', image: res[1]}, [res[1]])
this.detectWorker.postMessage({call: 'localDetect', image: res[1]}, [res[1]])
})
} else {
createImageBitmap(this.imageView).then(res => {
@@ -394,9 +376,6 @@
f7.dialog.alert(`ALVINN structure finding error: ${e.message}`)
})
}
f7.utils.nextFrame(() => {
this.selectChip("redraw")
})
},
selectAll (ev) {
if (ev.target.checked) {
@@ -406,12 +385,12 @@
}
},
async selectImage (mode) {
imageLoadMode = mode
this.imageLoadMode = mode
if (this.isCordova && mode == "camera") {
navigator.camera.getPicture(this.getImage, this.onFail, { quality: 50, destinationType: Camera.DestinationType.DATA_URL, correctOrientation: true });
return
}
if (mode == "camera" && !otherSettings.disableVideo) {
if (mode == "camera" && !this.otherSettings.disableVideo) {
this.videoAvailable = await this.openCamera(this.$refs.image_container)
if (this.videoAvailable) {
this.selectedChip = -1
@@ -419,14 +398,14 @@
this.imageView.src = null
this.$refs.image_cvs.style['background-image'] = 'none'
this.resultData = {}
const trackDetails = this.cameraStream.getVideoTracks()[0].getSettings()
let vidElement = this.$refs.vid_viewer
var trackDetails = this.cameraStream.getVideoTracks()[0].getSettings()
var vidElement = this.$refs.vid_viewer
vidElement.width = trackDetails.width
vidElement.height = trackDetails.height
if (!this.useWorkers) {
this.videoFrameDetect(vidElement, miniLocation)
this.videoFrameDetect(vidElement)
} else {
this.videoFrameDetectWorker(vidElement, vidWorker)
this.videoFrameDetectWorker(vidElement)
}
return
}
@@ -444,62 +423,36 @@
}).open()
return
}
if (mode == 'clipboard') {
navigator.clipboard.read().then(clip => {
if (!clip[0].types.includes("image/png")) {
throw new Error("Clipboard does not contain valid image data.");
}
return clip[0].getType("image/png");
}).then(blob => {
let clipImage = URL.createObjectURL(blob);
this.getImage(clipImage)
}).catch(e => {
console.log(e)
f7.dialog.alert(`Error pasting image: ${e.message}`)
})
return
}
this.$refs.image_chooser.click()
},
onFail (message) {
alert(`Camera fail: ${message}`)
},
selectChip ( iChip ) {
const [imCanvas, imageCtx] = this.resetView()

if (this.selectedChip == iChip) {
this.selectedChip = -1
this.resetView()
return
}

if (iChip == 'redraw') {
if (this.selectedChip == -1) {
this.resetView()
return
}
if (this.selectedChip == -1) return
iChip = this.selectedChip
}
const [imCanvas, imageCtx] = this.resetView(true)
let structBox, cvsBox, screenBox
[structBox, cvsBox, screenBox] = this.resultData.detections[iChip].box.getBoxes('side', this.imageView, imCanvas, {zoom: this.canvasZoom, offset: {...this.canvasOffset}})

this.infoLinkPos.x = Math.min(Math.max(screenBox.left, 0),imCanvas.width)
this.infoLinkPos.y = Math.min(Math.max(screenBox.top, 0), imCanvas.height)
const boxCoords = this.box2cvs(this.resultData.detections[iChip])[0]

const imageScale = Math.max(this.imageView.width / imCanvas.width, this.imageView.height / imCanvas.height)
imageCtx.drawImage(this.imageView, structBox.left, structBox.top, structBox.width, structBox.height, cvsBox.left, cvsBox.top, cvsBox.width, cvsBox.height)
imageCtx.save()
imageCtx.arc(cvsBox.left, cvsBox.top, 14 / this.canvasZoom, 0, 2 * Math.PI)
imageCtx.closePath()
imageCtx.clip()
imageCtx.drawImage(this.imageView,
structBox.left - (14 / this.canvasZoom * imageScale),
structBox.top - (14 / this.canvasZoom * imageScale),
(28 / this.canvasZoom * imageScale),
(28 / this.canvasZoom * imageScale),
cvsBox.left - (14 / this.canvasZoom),
cvsBox.top - (14 / this.canvasZoom),
(28 / this.canvasZoom), (28 / this.canvasZoom))
imageCtx.restore()
let boxLeft = boxCoords.cvsLeft
let boxTop = boxCoords.cvsTop
let boxWidth = boxCoords.cvsRight - boxCoords.cvsLeft
let boxHeight = boxCoords.cvsBottom - boxCoords.cvsTop
this.infoLinkPos.x = boxCoords.cvsLeft
this.infoLinkPos.y = boxCoords.cvsTop
let boxMin = Math.min(boxHeight, boxWidth)
this.infoLinkPos.adj = (boxMin >= 50) ? 0 : Math.min(10, 50 - boxMin)

imageCtx.strokeRect(boxLeft, boxTop, boxWidth, boxHeight)
this.selectedChip = iChip
this.resultData.detections[iChip].beenViewed = true

this.uploadDirty = true
|
||||
});
|
||||
},
|
||||
resetView (drawChip) {
|
||||
resetView () {
|
||||
const imCanvas = this.$refs.image_cvs
|
||||
const imageCtx = imCanvas.getContext("2d")
|
||||
imCanvas.width = imCanvas.clientWidth
|
||||
imCanvas.height = imCanvas.clientHeight
|
||||
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
|
||||
imageCtx.translate(this.canvasOffset.x,this.canvasOffset.y)
|
||||
imageCtx.scale(this.canvasZoom,this.canvasZoom)
|
||||
imageCtx.globalAlpha = 1
|
||||
imageCtx.strokeStyle = 'yellow'
|
||||
imageCtx.lineWidth = 3 / this.canvasZoom
|
||||
imageCtx.lineWidth = 3
|
||||
if (this.imageLoaded) {
|
||||
const imageLoc = imageLocation.getBoxes('side', this.imageView, imCanvas)
|
||||
if (drawChip) {imageCtx.globalAlpha = .5}
|
||||
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, imageLoc[1].left, imageLoc[1].top, imageLoc[1].width, imageLoc[1].height)
|
||||
if (drawChip) {imageCtx.globalAlpha = 1}
|
||||
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, this.imCvsLocation.left, this.imCvsLocation.top, this.imCvsLocation.width, this.imCvsLocation.height)
|
||||
}
|
||||
this.structureZoomed = false
|
||||
return [imCanvas, imageCtx]
|
||||
},
|
||||
getImage (searchImage) {
|
||||
@@ -540,22 +487,18 @@
|
||||
if (this.videoAvailable) {
|
||||
this.closeCamera()
|
||||
this.detecting = true
|
||||
reloadModel = true
|
||||
this.reloadModel = true
|
||||
resolve(searchImage)
|
||||
} else if (this.isCordova && imageLoadMode == "camera") {
|
||||
} else if (this.isCordova && this.imageLoadMode == "camera") {
|
||||
this.detecting = true
|
||||
resolve('data:image/jpg;base64,' + searchImage)
|
||||
}
|
||||
if (imageLoadMode == 'clipboard') {
|
||||
this.detecting = true
|
||||
resolve(searchImage)
|
||||
}
|
||||
const reader = new FileReader()
|
||||
reader.addEventListener("load", () => {
|
||||
this.detecting = true
|
||||
resolve(reader.result)
|
||||
},{once: true})
|
||||
if (imageLoadMode == 'sample') {
|
||||
if (this.imageLoadMode == 'sample') {
|
||||
fetch(`${this.isCordova ? 'https://localhost' : '.'}/samples/${this.detectorName}-${searchImage}.jpeg`).then( resp => {
|
||||
return resp.blob()
|
||||
}).then(respBlob => {
|
||||
@@ -577,14 +520,16 @@
|
||||
this.imageView.src = imgData
|
||||
return(this.imageView.decode())
|
||||
}).then( () => {
|
||||
this.canvasOffset = {x: 0, y: 0}
|
||||
this.canvasZoom = 1
|
||||
const imCanvas = this.$refs.image_cvs
|
||||
imCanvas.width = imCanvas.clientWidth
|
||||
imCanvas.height = imCanvas.clientHeight
|
||||
const imageCtx = imCanvas.getContext("2d")
|
||||
const imageLoc = imageLocation.getBoxes('side', this.imageView, imCanvas)
|
||||
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, imageLoc[1].left, imageLoc[1].top, imageLoc[1].width, imageLoc[1].height)
|
||||
let imageLoc = this.box2cvs({top: 0,left: 0,right: 1,bottom: 1})
|
||||
this.imCvsLocation.top = imageLoc[0].cvsTop
|
||||
this.imCvsLocation.left = imageLoc[0].cvsLeft
|
||||
this.imCvsLocation.width = imageLoc[0].cvsRight - imageLoc[0].cvsLeft
|
||||
this.imCvsLocation.height = imageLoc[0].cvsBottom - imageLoc[0].cvsTop
|
||||
imageCtx.drawImage(this.imageView, 0, 0, this.imageView.width, this.imageView.height, this.imCvsLocation.left, this.imCvsLocation.top, this.imCvsLocation.width, this.imCvsLocation.height)
|
||||
f7.utils.nextFrame(() => {
|
||||
this.setData()
|
||||
})
|
||||
@@ -594,7 +539,7 @@
|
||||
})
|
||||
},
|
||||
async submitData () {
|
||||
let uploadData = this.showResults
|
||||
var uploadData = this.showResults
|
||||
.filter( d => { return d.aboveThreshold && d.isSearched && !d.isDeleted })
|
||||
.map( r => { return {"top": r.top, "left": r.left, "bottom": r.bottom, "right": r.right, "label": r.label}})
|
||||
this.uploadUid = await this.uploadData(this.imageView.src.split(',')[1],uploadData,this.uploadUid)
|
||||
@@ -604,85 +549,44 @@
|
||||
this.detectorLevel = value
|
||||
},
|
||||
structureClick(e) {
|
||||
let self = this
|
||||
function loopIndex(i) {
|
||||
if (self.selectedChip == -1) return i
|
||||
let li = i + self.selectedChip
|
||||
if (li >= numBoxes) li -= numBoxes
|
||||
return li
|
||||
}
|
||||
let boxCoords = []
|
||||
this.resultData.detections.forEach(d => {
|
||||
let cvsBox = d.box.getBoxes('point',this.imageView,this.$refs.image_cvs)[1]
|
||||
cvsBox.clickable = d.aboveThreshold && d.isSearched && !d.isDeleted
|
||||
boxCoords.push(cvsBox)
|
||||
const boxCoords = this.box2cvs(this.showResults)
|
||||
var findBox = boxCoords.findIndex( (r, i) => { return r.cvsLeft <= e.offsetX &&
|
||||
r.cvsRight >= e.offsetX &&
|
||||
r.cvsTop <= e.offsetY &&
|
||||
r.cvsBottom >= e.offsetY &&
|
||||
this.resultData.detections[i].resultIndex > this.selectedChip &&
|
||||
this.resultData.detections[i].aboveThreshold &&
|
||||
this.resultData.detections[i].isSearched &&
|
||||
!this.resultData.detections[i].isDeleted
|
||||
})
|
||||
const numBoxes = boxCoords.length
|
||||
let clickX = (e.offsetX - this.canvasOffset.x) / this.canvasZoom
|
||||
let clickY = (e.offsetY - this.canvasOffset.y) / this.canvasZoom
|
||||
let boxEnd = boxCoords.splice(0, this.selectedChip)
|
||||
boxCoords = boxCoords.concat(boxEnd)
|
||||
const findBox = boxCoords.findIndex( (r, i) => {
|
||||
let di = loopIndex(i)
|
||||
if (di == this.selectedChip ) return false
|
||||
return r.clickable &&
|
||||
r.left <= clickX &&
|
||||
r.right >= clickX &&
|
||||
r.top <= clickY &&
|
||||
r.bottom >= clickY
|
||||
})
|
||||
this.selectChip(findBox >= 0 ? this.resultData.detections[loopIndex(findBox)].resultIndex : this.selectedChip)
|
||||
this.selectChip(findBox >= 0 ? this.resultData.detections[findBox].resultIndex : this.selectedChip)
|
||||
},
|
||||
toggleSettings() {
|
||||
this.showDetectSettings = !this.showDetectSettings
|
||||
f7.utils.nextFrame(() => {
|
||||
this.selectChip("redraw")
|
||||
})
|
||||
},
|
||||
startMove() {
|
||||
canvasMoving = true
|
||||
},
|
||||
endMove() {
|
||||
canvasMoving = false
|
||||
},
|
||||
makeMove(event) {
|
||||
if (canvasMoving) {
|
||||
this.canvasOffset.x += event.movementX
|
||||
this.canvasOffset.y += event.movementY
|
||||
this.selectChip("redraw")
|
||||
}
|
||||
},
|
||||
spinWheel(event) {
|
||||
let zoomFactor
|
||||
if (event.wheelDelta > 0) {
|
||||
zoomFactor = 1.05
|
||||
} else if (event.wheelDelta < 0) {
|
||||
zoomFactor = 1 / 1.05
|
||||
}
|
||||
this.canvasZoom *= zoomFactor
|
||||
this.canvasOffset.x = event.offsetX * (1 - zoomFactor) + this.canvasOffset.x * zoomFactor
|
||||
this.canvasOffset.y = event.offsetY * (1 - zoomFactor) + this.canvasOffset.y * zoomFactor
|
||||
this.selectChip("redraw")
|
||||
},
|
||||
resetZoom() {
|
||||
this.canvasZoom = 1
|
||||
this.canvasOffset.x = 0
|
||||
this.canvasOffset.y = 0
|
||||
this.selectChip("redraw")
|
||||
},
|
||||
zoomToSelected() {
|
||||
box2cvs(boxInput) {
|
||||
if (!boxInput || boxInput.length == 0) return []
|
||||
const boxList = boxInput.length ? boxInput : [boxInput]
|
||||
//const [imCanvas, imageCtx] = this.resetView()
|
||||
const imCanvas = this.$refs.image_cvs
|
||||
const boxCoords = this.resultData.detections[this.selectedChip].box.getBoxes('point', this.imageView, imCanvas)
|
||||
const boxWidth = boxCoords[1].right - boxCoords[1].left
|
||||
const boxHeight = boxCoords[1].bottom - boxCoords[1].top
|
||||
const boxMidX = (boxCoords[1].right + boxCoords[1].left ) / 2
|
||||
const boxMidY = (boxCoords[1].bottom + boxCoords[1].top ) / 2
|
||||
const zoomFactor = Math.min(imCanvas.width / boxWidth * .9, imCanvas.height / boxHeight * .9, 8)
|
||||
this.canvasZoom = zoomFactor
|
||||
this.canvasOffset.x = -(boxMidX * zoomFactor) + imCanvas.width / 2
|
||||
this.canvasOffset.y = -(boxMidY * zoomFactor) + imCanvas.height / 2
|
||||
this.selectChip("redraw")
|
||||
this.structureZoomed = true
|
||||
//const imageCtx = imCanvas.getContext("2d")
|
||||
var imgWidth
|
||||
var imgHeight
|
||||
const imgAspect = this.imageView.width / this.imageView.height
|
||||
const rendAspect = imCanvas.width / imCanvas.height
|
||||
if (imgAspect >= rendAspect) {
|
||||
imgWidth = imCanvas.width
|
||||
imgHeight = imCanvas.width / imgAspect
|
||||
} else {
|
||||
imgWidth = imCanvas.height * imgAspect
|
||||
imgHeight = imCanvas.height
|
||||
}
|
||||
const cvsCoords = boxList.map( (d, i) => {
|
||||
return {
|
||||
"cvsLeft": (imCanvas.width - imgWidth) / 2 + d.left * imgWidth,
|
||||
"cvsRight": (imCanvas.width - imgWidth) / 2 + d.right * imgWidth,
|
||||
"cvsTop": (imCanvas.height - imgHeight) / 2 + d.top * imgHeight,
|
||||
"cvsBottom": (imCanvas.height - imgHeight) / 2 + d.bottom * imgHeight
|
||||
}
|
||||
})
|
||||
return cvsCoords
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,11 +46,11 @@ export default {
|
||||
let rawBoxes = []
|
||||
let rawScores = []
|
||||
|
||||
for (let i = 0; i < rawRes.length; i++) {
|
||||
const getScores = rawRes[i].slice(4)
|
||||
for (var i = 0; i < rawRes.length; i++) {
|
||||
var getScores = rawRes[i].slice(4)
|
||||
if (getScores.every( s => s < .05)) { continue }
|
||||
const getBox = rawRes[i].slice(0,4)
|
||||
const boxCalc = [
|
||||
var getBox = rawRes[i].slice(0,4)
|
||||
var boxCalc = [
|
||||
(getBox[0] - (getBox[2] / 2)) / modelWidth,
|
||||
(getBox[1] - (getBox[3] / 2)) / modelHeight,
|
||||
(getBox[0] + (getBox[2] / 2)) / modelWidth,
|
||||
@@ -69,7 +69,7 @@ export default {
|
||||
let boxes_data = []
|
||||
let scores_data = []
|
||||
let classes_data = []
|
||||
for (let c = 0; c < outputSize - 4; c++) {
|
||||
for (var c = 0; c < outputSize - 4; c++) {
|
||||
structureScores = rawScores.map(x => x[c])
|
||||
tScores = tf.tensor1d(structureScores)
|
||||
resBoxes = await tf.image.nonMaxSuppressionAsync(tBoxes,tScores,10,0.5,.05)
|
||||
@@ -77,7 +77,7 @@ export default {
|
||||
tf.dispose(resBoxes)
|
||||
if (validBoxes) {
|
||||
boxes_data.push(...rawBoxes.filter( (_, idx) => validBoxes.includes(idx)))
|
||||
let outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
|
||||
var outputScores = structureScores.filter( (_, idx) => validBoxes.includes(idx))
|
||||
scores_data.push(...outputScores)
|
||||
classes_data.push(...outputScores.fill(c))
|
||||
}
|
||||
@@ -88,11 +88,11 @@ export default {
|
||||
tf.dispose(tScores)
|
||||
tf.dispose(tRes)
|
||||
const valid_detections_data = classes_data.length
|
||||
const output = {
|
||||
var output = {
|
||||
detections: []
|
||||
}
|
||||
for (let i =0; i < valid_detections_data; i++) {
|
||||
const [dLeft, dTop, dRight, dBottom] = boxes_data[i]
|
||||
for (var i =0; i < valid_detections_data; i++) {
|
||||
var [dLeft, dTop, dRight, dBottom] = boxes_data[i]
|
||||
output.detections.push({
|
||||
"top": dTop,
|
||||
"left": dLeft,
|
||||
@@ -110,9 +110,9 @@ export default {
|
||||
return output || { detections: [] }
|
||||
},
|
||||
getRemoteLabels() {
|
||||
let self = this
|
||||
const modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detectors`
|
||||
let xhr = new XMLHttpRequest()
|
||||
var self = this
|
||||
var modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detectors`
|
||||
var xhr = new XMLHttpRequest()
|
||||
xhr.open("GET", modelURL)
|
||||
xhr.setRequestHeader('Content-Type', 'application/json')
|
||||
xhr.timeout = 10000
|
||||
@@ -124,8 +124,8 @@ export default {
|
||||
f7.dialog.alert(`ALVINN has encountered an error: ${errorResponse.error}`)
|
||||
return
|
||||
}
|
||||
const detectors = JSON.parse(xhr.response).detectors
|
||||
let findLabel = detectors
|
||||
var detectors = JSON.parse(xhr.response).detectors
|
||||
var findLabel = detectors
|
||||
.find( d => { return d.name == self.detectorName } )?.labels
|
||||
.filter( l => { return l != "" } ).sort()
|
||||
.map( l => { return {'name': l, 'detect': true} } )
|
||||
@@ -139,9 +139,9 @@ export default {
|
||||
xhr.send()
|
||||
},
|
||||
remoteDetect() {
|
||||
let self = this
|
||||
const modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detect`
|
||||
let xhr = new XMLHttpRequest()
|
||||
var self = this
|
||||
var modelURL = `http://${this.serverSettings.address}:${this.serverSettings.port}/detect`
|
||||
var xhr = new XMLHttpRequest()
|
||||
xhr.open("POST", modelURL)
|
||||
xhr.timeout = 10000
|
||||
xhr.ontimeout = this.remoteTimeout
|
||||
@@ -158,7 +158,7 @@ export default {
|
||||
self.uploadDirty = true
|
||||
}
|
||||
|
||||
const doodsData = {
|
||||
var doodsData = {
|
||||
"detector_name": this.detectorName,
|
||||
"detect": {
|
||||
"*": 1
|
||||
@@ -172,8 +172,8 @@ export default {
|
||||
this.detecting = false
|
||||
f7.dialog.alert('No connection to remote ALVINN instance. Please check app settings.')
|
||||
},
|
||||
async videoFrameDetect (vidData, miniModel) {
|
||||
await this.loadModel(miniModel)
|
||||
async videoFrameDetect (vidData) {
|
||||
await this.loadModel(this.miniLocation)
|
||||
const [modelWidth, modelHeight] = model.inputs[0].shape.slice(1, 3)
|
||||
const imCanvas = this.$refs.image_cvs
|
||||
const imageCtx = imCanvas.getContext("2d")
|
||||
@@ -182,7 +182,8 @@ export default {
|
||||
imCanvas.width = imCanvas.clientWidth
|
||||
imCanvas.height = imCanvas.clientHeight
|
||||
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
|
||||
let imgWidth, imgHeight
|
||||
var imgWidth
|
||||
var imgHeight
|
||||
const imgAspect = vidData.width / vidData.height
|
||||
const rendAspect = imCanvas.width / imCanvas.height
|
||||
if (imgAspect >= rendAspect) {
|
||||
@@ -203,7 +204,7 @@ export default {
|
||||
|
||||
let rawCoords = []
|
||||
if (rawRes) {
|
||||
for (let i = 0; i < rawRes.length; i++) {
|
||||
for (var i = 0; i < rawRes.length; i++) {
|
||||
let getScores = rawRes[i].slice(4)
|
||||
if (getScores.some( s => s > .5)) {
|
||||
let foundTarget = rawRes[i].slice(0,2)
|
||||
@@ -213,7 +214,7 @@ export default {
|
||||
}
|
||||
|
||||
imageCtx.clearRect(0,0,imCanvas.width,imCanvas.height)
|
||||
for (let coord of rawCoords) {
|
||||
for (var coord of rawCoords) {
|
||||
console.log(`x: ${coord[0]}, y: ${coord[1]}`)
|
||||
let pointX = (imCanvas.width - imgWidth) / 2 + (coord[0] / modelWidth) * imgWidth -5
|
||||
let pointY = (imCanvas.height - imgHeight) / 2 + (coord[1] / modelHeight) * imgHeight -5
|
||||
|
||||
@@ -21,7 +21,6 @@
|
||||
</ul>
|
||||
</li>
|
||||
<li>Click on the image file icon <SvgIcon icon="photo_library" class="list-svg"/> to load a picture from the device storage.</li>
|
||||
<li>If the clipboard is available on the system, then there will be a paste icon <SvgIcon icon="clipboard" class="list-svg"/> to paste image data directly into the app.</li>
|
||||
<li>If demo mode is turned on, you can click on the marked image icon <SvgIcon icon="photo_sample" class="list-svg"/> to load an ALVINN sample image.</li>
|
||||
</ul>
|
||||
</li>
|
||||
@@ -31,11 +30,8 @@
|
||||
<li>Click on each tag to see the structure highlighted in the image or click on the image to see the tag for that structure (additional clicks to the same area will select overlapping structres).</li>
|
||||
<li>Tag color and proportion filled indicate ALVINN's level of confidence in the identification.</li>
|
||||
<li>An incorrect tag can be deleted by clicking on the tag's <f7-icon icon="chip-delete" style="margin-right: 1px;"></f7-icon> button.</li>
|
||||
<li>Click on the zoom to structure button <SvgIcon icon="zoom_to" class="list-svg"/> to magnify the view of the selected structure</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>Pan (middle click or touch and drag) and zoom (mouse wheel or pinch) to manually select detailed views in the image.</li>
|
||||
<li>The reset zoom button <SvgIcon icon="reset_zoom" class="list-svg"/> will return the image to its initial position and magnification.</li>
|
||||
</ol>
|
||||
<h2>Advanced Features</h2>
|
||||
<h3>Detection Parameters</h3>
|
||||
|
||||
@@ -91,7 +91,7 @@
|
||||
computed: {
|
||||
otherIp () {
|
||||
let filteredIps = {}
|
||||
for (let oldIp in this.serverSettings.previous) {
|
||||
for (var oldIp in this.serverSettings.previous) {
|
||||
if (oldIp != this.serverSettings.address) {
|
||||
filteredIps[oldIp] = this.serverSettings.previous[oldIp]
|
||||
}
|
||||
@@ -109,12 +109,12 @@
|
||||
}
|
||||
},
|
||||
created () {
|
||||
const loadServerSettings = localStorage.getItem('serverSettings')
|
||||
var loadServerSettings = localStorage.getItem('serverSettings')
|
||||
if (loadServerSettings) this.serverSettings = JSON.parse(loadServerSettings)
|
||||
if (!this.serverSettings.previous) this.serverSettings.previous = {}
|
||||
const loadThemeSettings = localStorage.getItem('themeSettings')
|
||||
var loadThemeSettings = localStorage.getItem('themeSettings')
|
||||
if (loadThemeSettings) this.themeSettings = JSON.parse(loadThemeSettings)
|
||||
const loadOtherSettings = localStorage.getItem('otherSettings')
|
||||
var loadOtherSettings = localStorage.getItem('otherSettings')
|
||||
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
|
||||
},
|
||||
methods: {
|
||||
@@ -136,7 +136,7 @@
|
||||
)
|
||||
saveSetting.then(
|
||||
() => {
|
||||
const toast = f7.toast.create({
|
||||
var toast = f7.toast.create({
|
||||
text: 'Settings saved',
|
||||
closeTimeout: 2000
|
||||
})
|
||||
@@ -144,7 +144,7 @@
|
||||
this.isDirty = false;
|
||||
},
|
||||
() => {
|
||||
const toast = f7.toast.create({
|
||||
var toast = f7.toast.create({
|
||||
text: 'ERROR: No settings saved',
|
||||
closeTimeout: 2000
|
||||
})
|
||||
|
||||
@@ -63,7 +63,7 @@
|
||||
return store()
|
||||
},
|
||||
created () {
|
||||
const loadOtherSettings = localStorage.getItem('otherSettings')
|
||||
var loadOtherSettings = localStorage.getItem('otherSettings')
|
||||
if (loadOtherSettings) this.otherSettings = JSON.parse(loadOtherSettings)
|
||||
fetch(`${this.isCordova ? 'https://localhost' : '.'}/models/thorax/descript.json`)
|
||||
.then((mod) => { return mod.json() })
|
||||
|
||||
@@ -5,8 +5,8 @@ export default {
|
||||
newUid (length) {
|
||||
const uidLength = length || 16
|
||||
const uidChars = 'abcdefghijklmnopqrstuvwxyz0123456789'
|
||||
let uid = []
|
||||
for (let i = 0; i < uidLength; i++) {
|
||||
var uid = []
|
||||
for (var i = 0; i < uidLength; i++) {
|
||||
uid.push(uidChars.charAt(Math.floor(Math.random() * ((i < 4) ? 26 : 36))))
|
||||
}
|
||||
return uid.join('')
|
||||
@@ -14,23 +14,24 @@ export default {
|
||||
uploadData (imagePayload, classPayload, prevUid) {
|
||||
let uploadImage = new Promise (resolve => {
|
||||
const dataUid = prevUid || this.newUid(16)
|
||||
let byteChars = window.atob(imagePayload)
|
||||
let byteArrays = []
|
||||
var byteChars = window.atob(imagePayload)
|
||||
var byteArrays = []
|
||||
var len = byteChars.length
|
||||
|
||||
for (let offset = 0; offset < byteChars.length; offset += 1024) {
|
||||
let slice = byteChars.slice(offset, offset + 1024)
|
||||
let byteNumbers = new Array(slice.length)
|
||||
for (let i = 0; i < slice.length; i++) {
|
||||
for (var offset = 0; offset < len; offset += 1024) {
|
||||
var slice = byteChars.slice(offset, offset + 1024)
|
||||
var byteNumbers = new Array(slice.length)
|
||||
for (var i = 0; i < slice.length; i++) {
|
||||
byteNumbers[i] = slice.charCodeAt(i)
|
||||
}
|
||||
|
||||
let byteArray = new Uint8Array(byteNumbers)
|
||||
var byteArray = new Uint8Array(byteNumbers)
|
||||
byteArrays.push(byteArray)
|
||||
}
|
||||
const imageBlob = new Blob(byteArrays, {type: 'image/jpeg'})
|
||||
var imageBlob = new Blob(byteArrays, {type: 'image/jpeg'})
|
||||
|
||||
let xhrJpg = new XMLHttpRequest()
|
||||
let uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.jpeg`
|
||||
var xhrJpg = new XMLHttpRequest()
|
||||
var uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.jpeg`
|
||||
xhrJpg.open("PUT", uploadUrl)
|
||||
xhrJpg.setRequestHeader('Content-Type', 'image/jpeg')
|
||||
xhrJpg.setRequestHeader('X-Method-Override', 'PUT')
|
||||
@@ -38,8 +39,8 @@ export default {
|
||||
xhrJpg.setRequestHeader("Authorization", "Basic " + btoa("LKBm3H6JdSaywyg:"))
|
||||
xhrJpg.send(imageBlob)
|
||||
|
||||
let xhrTxt = new XMLHttpRequest()
|
||||
uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.txt`
|
||||
var xhrTxt = new XMLHttpRequest()
|
||||
var uploadUrl = `https://nextcloud.azgeorgis.net/public.php/webdav/${dataUid}.txt`
|
||||
xhrTxt.open("PUT", uploadUrl)
|
||||
xhrTxt.setRequestHeader('Content-Type', 'text/plain')
|
||||
xhrTxt.setRequestHeader('X-Method-Override', 'PUT')
|
||||
@@ -50,7 +51,7 @@ export default {
|
||||
resolve(dataUid)
|
||||
})
|
||||
return uploadImage.then((newUid) => {
|
||||
const toast = f7.toast.create({
|
||||
var toast = f7.toast.create({
|
||||
text: 'Detections Uploaded: thank you.',
|
||||
closeTimeout: 2000
|
||||
})
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
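The upload hunks above rebuild a JPEG Blob from a base64 payload in 1024-character slices before PUTting it to WebDAV. The slicing pattern in isolation (a hedged sketch; `base64ToBlob` is an illustrative helper name, not part of the mixin):

```js
// Convert a base64 string (without the data: prefix) into a Blob without
// allocating one huge byte array: decode, then copy in 1024-character slices.
function base64ToBlob (base64Data, contentType = 'image/jpeg') {
  const byteChars = window.atob(base64Data)
  const byteArrays = []
  for (let offset = 0; offset < byteChars.length; offset += 1024) {
    const slice = byteChars.slice(offset, offset + 1024)
    const byteNumbers = new Array(slice.length)
    for (let i = 0; i < slice.length; i++) {
      byteNumbers[i] = slice.charCodeAt(i)
    }
    byteArrays.push(new Uint8Array(byteNumbers))
  }
  return new Blob(byteArrays, { type: contentType })
}

// e.g. const imageBlob = base64ToBlob(imageView.src.split(',')[1])
```
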
export default {
|
||||
data () {
|
||||
return {
|
||||
touchPrevious: {}
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
startTouch(event) {
|
||||
if (event.touches.length == 1) {
|
||||
this.touchPrevious = {x: event.touches[0].clientX, y: event.touches[0].clientY}
|
||||
}
|
||||
if (event.touches.length == 2) {
|
||||
let midX = (event.touches.item(0).clientX + event.touches.item(1).clientX) / 2
|
||||
let midY = (event.touches.item(0).clientY + event.touches.item(1).clientY) / 2
|
||||
this.touchPrevious = {distance: this.touchDistance(event.touches), x: midX, y: midY}
|
||||
}
|
||||
},
|
||||
endTouch(event) {
|
||||
if (event.touches.length == 1) {
|
||||
this.touchPrevious = {x: event.touches[0].clientX, y: event.touches[0].clientY}
|
||||
} else {
|
||||
//this.debugInfo = null
|
||||
}
|
||||
},
|
||||
moveTouch(event) {
|
||||
switch (event.touches.length) {
|
||||
case 1:
|
||||
this.canvasOffset.x += event.touches[0].clientX - this.touchPrevious.x
|
||||
this.canvasOffset.y += event.touches[0].clientY - this.touchPrevious.y
|
||||
this.touchPrevious = {x: event.touches[0].clientX, y: event.touches[0].clientY}
|
||||
break;
|
||||
case 2:
|
||||
let newDistance = this.touchDistance(event.touches)
|
||||
let midX = (event.touches.item(0).clientX + event.touches.item(1).clientX) / 2
|
||||
let midY = (event.touches.item(0).clientY + event.touches.item(1).clientY) / 2
|
||||
let zoomFactor = newDistance / this.touchPrevious.distance
|
||||
this.canvasZoom *= zoomFactor
|
||||
this.canvasOffset.x = (midX - 16) * (1 - zoomFactor) + this.canvasOffset.x * zoomFactor + (midX - this.touchPrevious.x)
|
||||
this.canvasOffset.y = (midY - 96) * (1 - zoomFactor) + this.canvasOffset.y * zoomFactor + (midY - this.touchPrevious.y)
|
||||
this.touchPrevious = {distance: newDistance, x: midX, y: midY}
|
||||
break;
|
||||
}
|
||||
this.selectChip("redraw")
|
||||
},
|
||||
touchDistance(touches) {
|
||||
let touch1 = touches.item(0)
|
||||
let touch2 = touches.item(1)
|
||||
return Math.sqrt((touch1.clientX - touch2.clientX) ** 2 + (touch1.clientY - touch2.clientY) ** 2)
|
||||
}
|
||||
}
|
||||
}
|
||||