feat(src/views/user): 人脸识别

duizhaopin_ui
duan 2 years ago
parent caa026f687
commit 768f4ce114

@ -3,3 +3,4 @@ ENV = 'development'
# base api
VUE_APP_BASE_API = 'http://111.231.67.55:9001'
VUE_APP_FACE_API = 'http://127.0.0.1:5000'

@ -3,4 +3,5 @@ ENV = 'production'
# base api
VUE_APP_BASE_API = '/prod-api'
VUE_APP_FACE_API = 'http://127.0.0.1:5000'

@ -5,4 +5,5 @@ ENV = 'staging'
# base api
VUE_APP_BASE_API = 'http://111.231.67.55:9001'
VUE_APP_FACE_API = 'http://127.0.0.1:5000'

@ -114,17 +114,37 @@ export function get_role_name_list(data) {
data data
}) })
} }
/**
 * Ask the face service to start a recognition session.
 * Routed to the dedicated face service via VUE_APP_FACE_API, not the base API.
 * @returns {Promise} response promise from the shared `request` helper.
 */
export function faceStart() {
  return request({
    url: '/face/startrecognition/',
    method: 'get',
    baseURL: process.env.VUE_APP_FACE_API
  })
}

/**
 * Ask the face service to stop the current recognition session.
 * @returns {Promise} response promise from the shared `request` helper.
 */
export function faceStop() {
  return request({
    url: '/face/stoprecognition/',
    method: 'get',
    baseURL: process.env.VUE_APP_FACE_API
  })
}
/**
 * Send a recognition request (captured frame data) to the face service.
 * @param {Object} data - payload forwarded verbatim to the service.
 * @returns {Promise} response promise from the shared `request` helper.
 */
export function faceRecognition(data) {
  const options = {
    url: '/face/recognition/',
    method: 'post',
    baseURL: process.env.VUE_APP_FACE_API,
    data
  }
  return request(options)
}
/**
 * Register face images for a user with the face service.
 * @param {Object} data - payload (e.g. images and user id) forwarded verbatim.
 * @returns {Promise} response promise from the shared `request` helper.
 */
export function faceAdd(data) {
  const options = {
    url: '/face/add/',
    method: 'post',
    baseURL: process.env.VUE_APP_FACE_API,
    data
  }
  return request(options)
}

@ -48,7 +48,9 @@ export function parseTime(time, cFormat) {
const time_str = format.replace(/{([ymdhisa])+}/g, (result, key) => { const time_str = format.replace(/{([ymdhisa])+}/g, (result, key) => {
const value = formatObj[key] const value = formatObj[key]
// Note: getDay() returns 0 on Sunday // Note: getDay() returns 0 on Sunday
if (key === 'a') { return ['日', '一', '二', '三', '四', '五', '六'][value ] } if (key === 'a') {
return ['日', '一', '二', '三', '四', '五', '六'][value]
}
return value.toString().padStart(2, '0') return value.toString().padStart(2, '0')
}) })
return time_str return time_str
@ -208,3 +210,36 @@ export let userMedia = function(constraints, success, error) {
userMedia(constraints, success, error) userMedia(constraints, success, error)
} }
// Minimal streaming-GET helper: opens an http(s) request and hands every
// response chunk to `callback` as it arrives (used to consume the face
// service's multipart video stream).
export const web_stream = {
  /**
   * Start a streaming GET request.
   * @param {string} url - must start with http:// or https://.
   * @param {Function} callback - invoked with each received chunk (Buffer).
   * @returns {{url: string, handler: Object, on: Function, destroy: Function}}
   *   thin wrapper around the underlying ClientRequest.
   * @throws {Error} if the URL uses a protocol other than http/https.
   */
  get: function(url, callback) {
    let webClient
    if (url.startsWith('http://')) {
      webClient = require('http')
    } else if (url.startsWith('https://')) {
      webClient = require('https')
    } else {
      // Was `throw 'Unsupported protocol.'` — throw a real Error so callers
      // get a stack trace and `instanceof Error` checks work.
      throw new Error('Unsupported protocol.')
    }
    const clientRequest = webClient.get(url, function(response) {
      // Forward raw chunks; callers are responsible for any framing/parsing.
      response.on('data', function(chunk) {
        callback(chunk)
      })
    })
    return {
      url: url,
      handler: clientRequest,
      on: function(type, listener) {
        clientRequest.on(type, listener)
      },
      destroy: function() {
        clientRequest.destroy()
      }
    }
  }
}

@ -183,13 +183,14 @@
<el-dialog
:close-on-click-modal="false"
title="录入人脸"
destroy-on-close
:visible="dialogFaceVisible"
width="400px"
@close="handleCancel"
>
<div>
<div class="face-content">
<img v-if="play" id="video" width="200px" height="200px" :src="play" alt="">
<canvas id="canvas" width="200px" height="200px" style="transform:rotateY(180deg)" />
</div>
<div class="name">程小红</div>
@ -205,11 +206,19 @@
<script> <script>
import UserForm from '@/components/UserForm' import UserForm from '@/components/UserForm'
import { get_user_list, add, update, del, get_module_list, get_user_power_list, add_user_power, get_role_name_list } from '@/api/user/user' import {
add,
add_user_power,
del, faceStart,
faceStop, faceAdd,
get_module_list,
get_role_name_list,
get_user_list,
get_user_power_list,
update
} from '@/api/user/user'
import stringify from '@/utils/stringify' import stringify from '@/utils/stringify'
import { userMedia } from '@/utils' import { web_stream as webStream } from '@/utils'
require('tracking/build/tracking-min.js')
require('tracking/build/data/face-min.js')
export default { export default {
name: 'User', name: 'User',
@ -264,7 +273,11 @@ export default {
// //
dialogFaceVisible: false, dialogFaceVisible: false,
videoObj: null, videoObj: null,
trackerTask: null trackerTask: null,
web_stream: null,
img_list: [],
play: null,
rflag: false
} }
}, },
created() { created() {
@ -361,8 +374,33 @@ export default {
this.dialogVisible = true this.dialogVisible = true
}, },
handleFace(index, row) { handleFace(index, row) {
faceStart().then(res => {
setTimeout(() => { this.play = process.env.VUE_APP_FACE_API + '/face/play/' }, 200)
})
this.rflag = true
this.dialogFaceVisible = true this.dialogFaceVisible = true
this.openCamera() this.web_stream = webStream.get(process.env.VUE_APP_FACE_API + '/face/play/', (data) => {
if (data.length > 41) {
const buffer = Buffer.allocUnsafe(data.byteLength - 41)
data.copy(buffer, 0, 37)
this.img_src = buffer.toString('base64')
const count = Math.floor(Math.random() * 10 + 1)
//
if (count < 7) {
this.img_list.push('data:image/png;base64,' + this.img_src)
}
if (this.img_list.length > 200 && this.rflag) {
const data = {
'images': this.img_list,
'user_id': row.user_id
}
faceAdd(data).then((res) => {
this.rflag = false
})
this.img_list = []
}
}
})
}, },
handleRole(index, row) { handleRole(index, row) {
this.roleDialogVisible = true this.roleDialogVisible = true
@ -441,51 +479,17 @@ export default {
instrument_module_id: [] instrument_module_id: []
} }
}, },
/*
Face-recognition helpers (in-browser capture via the tracking.js library)
*/
openCamera() {
// Wait for the dialog DOM (the #video / #canvas elements) to render first.
this.$nextTick(() => {
const canvas = document.getElementById('canvas')
const context = canvas.getContext('2d')
this.videoObj = document.getElementById('video')
// eslint-disable-next-line no-undef
const tracker = new tracking.ObjectTracker('face') // `tracking` is a global from tracking-min.js / face-min.js
tracker.setInitialScale(4)
tracker.setStepSize(2)
tracker.setEdgesDensity(0.1)
// eslint-disable-next-line no-undef
this.trackerTask = tracking.track('#video', tracker, { camera: true })
const constraints = {
video: { width: 200, height: 200 },
audio: false
}
userMedia(constraints, this.success, this.error)
tracker.on('track', (event) => {
event.data.forEach((rect) => {
// Mirror the current video frame onto the canvas and outline each face.
context.drawImage(this.videoObj, 0, 0, canvas.width, canvas.height)
context.font = '16px Helvetica'
context.strokeStyle = '#1890ff'
context.strokeRect(rect.x, rect.y, rect.width, rect.height)
})
if (event.data.length !== 0) {
// At least one face detected this frame:
// snapshot the canvas as a base64 JPEG via toDataURL.
const base64Img = canvas.toDataURL('image/jpeg')
console.log(base64Img)
}
})
})
},
handleCancel() { handleCancel() {
faceStop().then(
res => {
console.log(res)
}
)
this.web_stream.destroy()
this.play = null
this.rflag = false
this.dialogFaceVisible = false
this.videoObj.srcObject.getTracks()[0].stop() this.videoObj.srcObject.getTracks()[0].stop()
this.trackerTask.stop() this.trackerTask.stop()
}, },
@ -529,7 +533,7 @@ export default {
float: right;
}
}
#video,canvas{
position: absolute;
top: 100px;
left: 120px;

Loading…
Cancel
Save