云迹是一套基于场所码数据分析的疫情防控看板,集“场所码”“商店码”“街道码”多方位数据收集、跟踪、分析于一体的实时大数据看板,为疫情防控提供可靠的及时数据,为未来趋势预测提供数据基础。 系统包括场所码生成、人员信息采集、大数据分析等模块。该系统基于真实开发流程,采用企业级敏捷开发平台、分布式微服务架构,从需求梳理、迭代规划、编码实现、压力测试、代码合并、自动化部署等流程完成实战开发。
https://gitee.com/White-lby/vue3-h5-development.git
https://gitee.com/beiyou123/project-base-template.git
npm install 安装依赖
npm run dev 运行项目
https://cn.vitejs.dev/
https://element-plus.gitee.io/zh-CN/
Element Plus 是由饿了么大前端团队开源出品的一套基于 Vue 3 的组件库,面向设计师和开发者,提供配套设计资源,帮助你的网站快速成型。
https://www.axios-http.cn/
Axios 是一个基于 promise 的网络请求库,可以用于浏览器和 node.js
https://cn.vuejs.org/
渐进式 JavaScript 框架 易学易用,性能出色,适用场景丰富的 Web 前端框架。
https://router.vuejs.org/zh/
Vue Router 是 Vue.js 的官方路由。它与 Vue.js 核心深度集成,让用 Vue.js 构建单页应用变得轻而易举
https://pinia.vuejs.org/zh/
Pinia 是 Vue 的专属状态管理库,它允许你跨组件或页面共享状态。
import http from "@/http/index"

/**
 * API descriptor for the demo endpoint. Each entry bundles a display
 * name, the request URL, and a `call` method that performs the request.
 */
export default {
  select: {
    name: "查询",
    url: "/yunji-api/mygirlfriend",
    // Shorthand method keeps `this` bound to this descriptor object when
    // invoked as girlApi.select.call(), so `this.url` resolves correctly.
    async call(params: any = {}) {
      const response = await http.get(this.url, params);
      return response;
    }
  },
}
// Re-export the girl API so consumers can `import { girlApi } from "@/api/index"`.
export { default as girlApi } from "./girlApi";
<script setup lang="ts">
// Demo component: fetch the sample endpoint once the component is mounted.
import { onMounted } from 'vue'
import { girlApi } from "@/api/index"
onMounted(() => {
  // Issues GET /yunji-api/mygirlfriend via the shared http client;
  // `res` is whatever the http wrapper resolves with (presumably the
  // parsed response body — confirm against @/http/index).
  girlApi.select.call().then(res => {
    console.table(res)
  })
})
</script>
https://map.heifahaizei.com/doc/index.html
npm install vue-baidu-map-3x --save
import BaiduMap from 'vue-baidu-map-3x'
import {BmlMarkerClusterer} from 'vue-baidu-map-3x'
const app = createApp(App)
// Register the Baidu Map plugin globally for all components.
app.use(BaiduMap, {
  // ak is the API key from the Baidu Maps developer console
  // (http://lbsyun.baidu.com/apiconsole/key).
  // NOTE(review): the key is committed in plain text — rotate it and load
  // it from environment/config instead of source control.
  ak: 'FMYihQ2aXcKidOkniSS9hv68QcH7gskK',
  // v: '2.0', // defaults to 3.0
  // type: 'WebGL' // or 'API' (default). In WebGL mode BMap === BMapGL.
});
// Register the marker-clusterer globally as <bml-marker-cluster>.
app.component('bml-marker-cluster', BmlMarkerClusterer)
app.mount('#app')
<!-- Minimal usage: center the map by city name. -->
<baidu-map class="bm-view" center="郑州" >
</baidu-map>
<!-- Full usage: bound center/zoom, ready callback, wheel zoom, a city
     picker control, and a draggable marker with a custom icon. -->
<baidu-map class="bm-view" :center="center" :zoom="15" @ready="handler" :scroll-wheel-zoom="true">
<bm-city-list anchor="BMAP_ANCHOR_TOP_LEFT"></bm-city-list>
<!-- Marker position tracks the lng/lat refs; dragend updates them. -->
<bm-marker :position="{ lng: lng, lat: lat }" :dragging="true" @dragend="dragend" :icon="{
url: 'http://developer.baidu.com/map/jsdemo/img/fox.gif',
size: { width: 300, height: 157 }
}">
</bm-marker>
</baidu-map>
<script setup lang="ts">
// Reverse-geocode the marker position after a drag ends.
// NOTE(review): `lng`, `lat` and `address1` are refs declared elsewhere in
// the full component — this snippet is incomplete on its own.
import { getAddressByPoints } from 'vue-baidu-map-3x';
const dragend = (e) => {
  lng.value = e.point.lng;
  lat.value = e.point.lat;
  const config = {
    // NOTE(review): order here is [lat, lng] — confirm against the
    // vue-baidu-map-3x getAddressByPoints documentation.
    location: [lat.value, lng.value]
  };
  // Second argument is a list of API keys. The bare .catch() with no
  // handler silently swallows geocoding failures.
  getAddressByPoints(config, ['FMYihQ2aXcKidOkniSS9hv68QcH7gskK']).then((res) => {
    address1.value = res.formatted_address;
  }).catch();
}
</script>
https://www.npmjs.com/package/@chenfengyuan/vue-qrcode
npm install @chenfengyuan/vue-qrcode
import VueQrcode from '@chenfengyuan/vue-qrcode';
const app = createApp(App)
// Register globally under the component's own name, i.e. <vue-qrcode>.
app.component(VueQrcode.name, VueQrcode);
<template>
<!-- Paged list; pageChange reloads the current page of results. -->
<el-pagination background layout="prev, pager, next" :total="pageInfo.total" :page-size="pageInfo.pageSize"
@current-change="pageChange" />
<!-- Left-sliding drawer showing the venue QR code card. -->
<el-drawer v-model="drawer" title="场所码" direction="ltr">
<el-card class="card" >
<span class="title">{{ ma.title }}</span>
<figure class="qrcode">
<!-- QR encodes ma.url; the overlaid image is a center logo. -->
<vue-qrcode :value="ma.url" tag="img" ></vue-qrcode>
<img class="qrcode__image" :src="ma.img" />
</figure>
<div style="padding: 4px">
<div class="bottom">
<time class="time">打开 [支付宝/微信/豫事办] 扫一扫</time>
</div>
</div>
</el-card>
</el-drawer>
</template>
https://ai.arcsoft.com.cn/product/arcface.html
ArcSoft_ArcFace_Java_Windows_x64_V3.0.mp4
// Thin wrapper around the ArcSoft ArcFace SDK. A single shared FaceEngine is
// activated and initialised once at class-load time; the instance methods
// expose feature extraction and 1:1 feature comparison.
// NOTE(review): appId/sdkKey are committed in plain text — move them to
// external configuration and rotate the credentials.
// NOTE(review): the engine is a shared static — confirm the ArcSoft SDK
// permits concurrent calls, otherwise synchronise access.
@Service
public class MyFaceEngine {
private static FaceEngine faceEngine;
static {
// Credentials issued by the ArcSoft developer console.
String appId = "7FvDuNLSQXD63tfzWR3v1mbmL7VmRFSEjrLCvX1Zhrum";
String sdkKey = "FeFfCbKqwYuMCGnzEHoT5a7rxhRazpsTY7amiYjYKc1a";
// Directory containing the extracted native DLLs.
faceEngine = new FaceEngine("D:\\ArcSoft_ArcFace_Java_Windows_x64_V3.0\\libs\\WIN64");
// Activate the engine online; "already activated" is treated as success.
int errorCode = faceEngine.activeOnline(appId, sdkKey);
if (errorCode != ErrorInfo.MOK.getValue() && errorCode != ErrorInfo.MERR_ASF_ALREADY_ACTIVATED.getValue()) {
System.out.println("引擎激活失败");
}
ActiveFileInfo activeFileInfo=new ActiveFileInfo();
errorCode = faceEngine.getActiveFileInfo(activeFileInfo);
if (errorCode != ErrorInfo.MOK.getValue() && errorCode != ErrorInfo.MERR_ASF_ALREADY_ACTIVATED.getValue()) {
System.out.println("获取激活文件信息失败");
}
// Engine configuration: still-image mode, all face orientations,
// up to 10 faces, detect-scale 16.
EngineConfiguration engineConfiguration = new EngineConfiguration();
engineConfiguration.setDetectMode(DetectMode.ASF_DETECT_MODE_IMAGE);
engineConfiguration.setDetectFaceOrientPriority(DetectOrient.ASF_OP_ALL_OUT);
engineConfiguration.setDetectFaceMaxNum(10);
engineConfiguration.setDetectFaceScaleVal(16);
// Feature configuration: enable every optional capability.
FunctionConfiguration functionConfiguration = new FunctionConfiguration();
functionConfiguration.setSupportAge(true);
functionConfiguration.setSupportFace3dAngle(true);
functionConfiguration.setSupportFaceDetect(true);
functionConfiguration.setSupportFaceRecognition(true);
functionConfiguration.setSupportGender(true);
functionConfiguration.setSupportLiveness(true);
functionConfiguration.setSupportIRLiveness(true);
engineConfiguration.setFunctionConfiguration(functionConfiguration);
// Initialise the engine. NOTE(review): failures are only logged here —
// subsequent calls would then operate on an uninitialised engine.
errorCode = faceEngine.init(engineConfiguration);
if (errorCode != ErrorInfo.MOK.getValue()) {
System.out.println("初始化引擎失败");
}
}
// Extracts the face-feature byte array from a raw image (encoded bytes).
// NOTE(review): errorCode from detectFaces/extractFaceFeature is never
// checked, and faceInfoList.get(0) throws IndexOutOfBoundsException when
// no face is detected — callers should be prepared for that.
public byte[] getFeature(byte[] bytes){
// Face detection on the decoded RGB data.
//new File("D:\\hm\\ArcSoft_ArcFace_Java_Windows_x64_V3.0\\a.jpg")
ImageInfo imageInfo = getRGBData(bytes);
List<FaceInfo> faceInfoList = new ArrayList<FaceInfo>();
int errorCode = faceEngine.detectFaces(imageInfo.getImageData(), imageInfo.getWidth(), imageInfo.getHeight(), imageInfo.getImageFormat(), faceInfoList);
System.out.println(faceInfoList);
// Extract the feature vector of the first detected face only.
FaceFeature faceFeature = new FaceFeature();
errorCode = faceEngine.extractFaceFeature(imageInfo.getImageData(), imageInfo.getWidth(), imageInfo.getHeight(), imageInfo.getImageFormat(), faceInfoList.get(0), faceFeature);
System.out.println("特征值大小:" + faceFeature.getFeatureData().length);
return faceFeature.getFeatureData();
}
// 1:1 comparison of two feature vectors; returns the SDK similarity score.
public float compare(byte[] data1,byte[] data2){
// Wrap the raw bytes in FaceFeature objects and compare.
FaceFeature targetFaceFeature = new FaceFeature();
targetFaceFeature.setFeatureData(data1);
FaceFeature sourceFaceFeature = new FaceFeature();
sourceFaceFeature.setFeatureData(data2);
FaceSimilar faceSimilar = new FaceSimilar();
int errorCode = faceEngine.compareFaceFeature(targetFaceFeature, sourceFaceFeature, faceSimilar);
System.out.println("相似度:" + faceSimilar.getScore());
return faceSimilar.getScore();
}}
// Usage fragment A: strip the data-URL prefix ("...base64,") from a base64
// image string, decode it, and extract the feature vector.
// NOTE(review): fragments A and B redeclare dataArray/bytes/feature — they
// are separate notes and will not compile if pasted into one scope.
String[] dataArray = StrUtil.splitToArray("图片base64字符串", "base64,");
byte[] bytes = Base64.decode(dataArray[1]);
byte[] feature = myFaceEngine.getFeature(bytes);
// Usage fragment B: extract the feature of the submitted face image, then
// compare it against every stored feature of the same venue in Redis.
String img = userImg.getFaceImg();
String[] dataArray = StrUtil.splitToArray(img, "base64,");
byte[] bytes = Base64.decode(dataArray[1]);
byte[] feature = myFaceEngine.getFeature(bytes);
Map<String, byte[]> entries = hashOperations.entries("venue." + userImg.getVenueId());
for (Map.Entry<String, byte[]> entry : entries.entrySet()) {
// Similarity of the submitted face vs. this stored feature.
// NOTE(review): the score `b` is computed but never used here — the full
// implementation presumably thresholds it to decide a match.
float b = myFaceEngine.compare(feature,entry.getValue());
}
# Redis配置
spring.redis.host=123.57.206.19
spring.redis.port=6380
导入依赖:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
创建RedisConfig类:
// Exposes a HashOperations<String, String, byte[]> bean for storing raw
// face-feature byte arrays in Redis hashes (keys/hash-keys as plain strings).
@Configuration
public class RedisConfig {
@Bean(name = "bytesHashOperations")
public HashOperations<String, String,byte[]> bytesRedisTemplate(RedisConnectionFactory redisConnectionFactory) {
RedisTemplate<String, byte[]> redisTemplate = new RedisTemplate<>();
redisTemplate.setConnectionFactory(redisConnectionFactory);
redisTemplate.setKeySerializer(RedisSerializer.string());
redisTemplate.setHashKeySerializer(RedisSerializer.string());
// Hash values are raw feature bytes, so use the byte-array serializer.
// (The original comment claimed "jsonSerializer", which was incorrect.)
redisTemplate.setHashValueSerializer(RedisSerializer.byteArray());
// afterPropertiesSet() must run manually because the template is built
// here rather than managed as a bean itself.
redisTemplate.afterPropertiesSet();
return redisTemplate.opsForHash();
}
}
// Inject the template (use @Resource if @Autowired fails to resolve).
@Autowired
private StringRedisTemplate stringRedisTemplate;
// Inside the face-recognition method: iterate all "venue.*" keys with SCAN.
// NOTE(review): RedisTemplate.scan(ScanOptions) requires Spring Boot 2.7.5+,
// so bump the version in the root pom.xml before using this.
ScanOptions scanOptions = ScanOptions.scanOptions().match("venue.*").build();
Cursor<String> cursor = stringRedisTemplate.scan(scanOptions);
while (cursor.hasNext()){
System.out.println(cursor.next());
}
// Close the cursor to release the underlying connection.
cursor.close();
npm install tracking
兼容性bug修复
/**
 * Cross-browser getUserMedia shim.
 *
 * @param constrains MediaStreamConstraints, e.g. { video: true }.
 * @param success    callback invoked with the MediaStream on success.
 * @param error      callback invoked with the failure reason.
 *
 * Fixes vs. the original:
 *  - `navagator` typo on the Firefox branch threw a ReferenceError.
 *  - the legacy prefixed APIs (webkitGetUserMedia / mozGetUserMedia /
 *    getUserMedia) are callback-based, not promise-based, so calling
 *    .then() on their return value crashed; they now get (constrains,
 *    success, error) directly.
 */
function getUserMedia(constrains, success, error) {
  if (typeof navigator === 'undefined') {
    // Not running in a browser context at all.
    console.log("浏览器不支持getUserMedia")
    return;
  }
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
    // Modern standard API (promise-based).
    navigator.mediaDevices.getUserMedia(constrains).then(success).catch(error);
  } else if (navigator.webkitGetUserMedia) {
    // Legacy WebKit API (callback-based).
    navigator.webkitGetUserMedia(constrains, success, error);
  } else if (navigator.mozGetUserMedia) {
    // Legacy Firefox API (callback-based).
    navigator.mozGetUserMedia(constrains, success, error);
  } else if (navigator.getUserMedia) {
    // Unprefixed legacy API (callback-based).
    navigator.getUserMedia(constrains, success, error);
  } else {
    console.log("浏览器不支持getUserMedia")
  }
}
// Override tracking.initUserMedia_ because Chrome's underlying API changed:
// the camera stream must be attached via srcObject, with an object URL as
// the fallback for older browsers.
window.tracking.initUserMedia_ = function (element, opt_options) {
  const wantAudio = !!(opt_options && opt_options.audio);
  const constraints = {
    video: true,
    audio: wantAudio
  };
  // Attach the captured stream to the <video> element.
  const onStream = function (stream) {
    try {
      element.srcObject = stream;
    } catch (err) {
      element.src = window.URL.createObjectURL(stream);
    }
  };
  // Surface capture failures in the console.
  const onError = function (e) {
    console.log(e.message)
  };
  getUserMedia(constraints, onStream, onError);
};
// Override tracking.trackVideo_: the stock implementation could not be
// stopped (task.stop() left the requestAnimationFrame loop running). This
// version keeps a `stopped` flag and cancels the pending frame on 'stop'.
window.tracking.trackVideo_ = function (element, tracker) {
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
var width;
var height;
// Keep the scratch canvas sized to the on-screen video element.
var resizeCanvas_ = function () {
width = element.offsetWidth;
height = element.offsetHeight;
canvas.width = width;
canvas.height = height;
};
resizeCanvas_();
element.addEventListener('resize', resizeCanvas_);
var requestId;
var stopped = false;
// Per-frame loop: copy the current video frame onto the canvas and run the
// tracker on it, then re-schedule itself while not stopped.
var requestAnimationFrame_ = function () {
requestId = window.requestAnimationFrame(function () {
if (element.readyState === element.HAVE_ENOUGH_DATA) {
try {
// Firefox v~30.0 gets confused with the video readyState firing an
// erroneous HAVE_ENOUGH_DATA just before HAVE_CURRENT_DATA state,
// hence keep trying to read it until resolved.
context.drawImage(element, 0, 0, width, height);
} catch (err) { }
tracking.trackCanvasInternal_(canvas, tracker);
}
// This flag check is what makes task.stop() actually halt the loop.
if (stopped !== true) {
requestAnimationFrame_();
}
});
};
var task = new tracking.TrackerTask(tracker);
task.on('stop', function () {
stopped = true;
window.cancelAnimationFrame(requestId);
});
task.on('run', function () {
stopped = false;
requestAnimationFrame_();
});
return task.run();
};
/////////////////
//////////////
import "tracking";
import "tracking/build/data/face";
import "@/asset/trackingX.js"
/////////////////
<template>
<el-row :gutter="20">
<el-col :span="12">
<div class="wrapp">
<!-- Status line: green once a face is detected, red otherwise. -->
<div class="status" :style="{ color: msg === '检测到人脸' ? 'green' : 'red' }">
{{ msg }}
</div>
<!-- Live camera feed with a canvas overlay for face boxes/snapshot. -->
<div class="videoWrapp">
<video id="myVideo" preload="preload" autoplay loop muted></video>
<canvas ref="myCanvas" id="myCanvas" width="200" height="200"></canvas>
</div>
<div class="cjbt">
<el-button type="primary" @click="start">采集</el-button>
</div>
</div>
</el-col>
<el-col :span="12">
<!-- Captured snapshot preview plus the upload action. -->
<el-space wrap>
<el-image class="img" :src="img" v-if="img != ''" />
<el-button type="success" @click="up">上传</el-button>
</el-space>
</el-col>
</el-row>
</template>
<script setup lang="ts">
// Face-capture component: shows the camera, draws boxes around detected
// faces, and snapshots a frame when the user clicks 采集 (capture).
// NOTE(review): `tracking` is a global installed by the side-effect imports
// of "tracking" and the trackingX.js patch file.
import { onMounted, ref, reactive } from 'vue'
const img = ref('')
const msg = ref("");
const collecting = ref(false)
onMounted(() => {
  init()
});
let trackerTask: any = null;
// Overlay canvas used for drawing face boxes and the captured snapshot.
const myCanvas = ref();
// Face detector from tracking.js. (The original comments called this a
// "color tracker", which was inaccurate — ObjectTracker("face") detects faces.)
const myTracker: any = new tracking.ObjectTracker("face");
myTracker.setInitialScale(4);
myTracker.setStepSize(2);
myTracker.setEdgesDensity(0.1);
const start = () => {
  trackerTask.run()
  collecting.value = true;
}
const init = () => {
  // Start tracking the <video> element using the camera.
  trackerTask = tracking.track("#myVideo", myTracker, { camera: true });
  // Handle each detection frame.
  myTracker.on("track", (event: any) => {
    const context = myCanvas.value?.getContext("2d") as CanvasRenderingContext2D;
    context.clearRect(0, 0, myCanvas.value.width, myCanvas.value.height);
    if (event.data.length === 0) {
      msg.value = "没识别到人脸...";
    } else {
      msg.value = "检测到人脸";
      // Draw a box and the coordinates next to each detected face.
      event.data.forEach((rect: any) => {
        context.strokeStyle = '#a64ceb';
        context.strokeRect(rect.x, rect.y, rect.width, rect.height);
        context.font = '11px Helvetica';
        context.fillStyle = "#fff";
        context.fillText('x: ' + rect.x + 'px', rect.x + rect.width + 5, rect.y + 11);
        context.fillText('y: ' + rect.y + 'px', rect.x + rect.width + 5, rect.y + 22);
      });
      // While capturing: stop the tracker, copy the current video frame to
      // the canvas, and export it as a data URL for the preview/upload.
      if (collecting.value == true) {
        trackerTask.stop();
        const myVideo = document.querySelector("#myVideo") as HTMLVideoElement;
        context.drawImage(myVideo, 0, 0, myCanvas.value.width, myCanvas.value.height);
        img.value = myCanvas.value.toDataURL("image/png")
      }
    }
  })
}
// TODO: upload handler is not implemented in this snippet.
const up = () => {
}
</script>
<style scoped>
/* Card wrapper with a full-bleed background image. */
.wrapp {
  height: 300px;
  background: url('/images/bg.jpeg') no-repeat;
  background-size: 100% 100%;
  padding-top: 10px;
}
/* Positioning context so the canvas can overlay the video exactly. */
.videoWrapp {
  width: 200px;
  height: 200px;
  margin: auto;
  margin-top: 30px;
  position: relative;
}
/* Overlay canvas stacked on top of the video, clipped to a circle. */
#myCanvas {
  position: absolute;
  top: 0;
  left: 0;
  border-radius: 50%;
  width: 100%;
  height: 100%;
}
#myVideo {
  width: 100%;
  height: 100%;
  border-radius: 50%;
  object-fit: cover;
}
.status {
  text-align: center;
}
/* Capture-button row. */
.cjbt {
  text-align: center;
  margin-top: 10px;
}
/* NOTE(review): .rect is not referenced in the template shown here —
   presumably left over from an earlier box-drawing approach; verify before
   removing. */
.rect {
  border: 2px solid #081797;
  left: -1000px;
  position: absolute;
  top: -1000px;
}
/* Snapshot preview thumbnail. */
.img {
  border: 1px solid #cccccc;
  border-radius: 5px;
  width: 100px;
  height: 100px;
}
</style>
此处可能存在不合适展示的内容,页面不予展示。您可通过相关编辑功能自查并修改。
如您确认内容无涉及 不当用语 / 纯广告导流 / 暴力 / 低俗色情 / 侵权 / 盗版 / 虚假 / 无价值内容或违法国家有关法律法规的内容,可点击提交进行申诉,我们将尽快为您处理。