1 Star 0 Fork 3

aalee / Kinect动作捕捉软件_1

加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
克隆/下载
Kinect1Func.cpp 19.14 KB
一键复制 编辑 原始数据 按行查看 历史
FelixWang810 提交于 2020-09-18 15:11 . 功能代码
#include <windows.h>
#include "Kinect1Func.h"
#include <iostream>
#include <fstream>
#include <string>
// Default constructor: intentionally empty — no sensor initialization happens
// here; streaming is started explicitly via cameraCapture().
Kinect1Func::Kinect1Func() {}
int Kinect1Func::cameraCapture(int CapFreq,int CapTimes) {
cv::Mat colorImg;
colorImg.create(480, 640, CV_8UC3);
cv::Mat depthImg;
depthImg.create(480, 640, CV_16UC1);
cv::Mat maskImg;
maskImg.create(480, 640, CV_8UC1);
cv::Mat skeletonImg;
skeletonImg.create(480, 640, CV_8UC3);
HANDLE colorEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
HANDLE depthEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
HANDLE colorStreamHandle = NULL;
HANDLE depthStreamHandle = NULL;
HRESULT hr;
hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
| NUI_INITIALIZE_FLAG_USES_SKELETON);
if (FAILED(hr))
{
std::cout << "Nui initialize failed." << std::endl;
return hr;
}
hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, colorEvent, &colorStreamHandle);
if (FAILED(hr))
{
std::cout << "Can not open color stream." << std::endl;
return hr;
}
hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, depthEvent, &depthStreamHandle);
if (FAILED(hr))
{
std::cout << "Can not open depth stream." << std::endl;
return hr;
}
hr = NuiSkeletonTrackingEnable(skeletonEvent, 0);
if (FAILED(hr))
{
std::cout << "Could not open skeleton tracking event" << std::endl;
NuiShutdown();
return hr;
}
mycout.open("skeletondata.txt", std::ios::trunc);
int n = 0;
cv::String colorImgName = "colorImg" + std::to_string(n) + ".png";
cv::String depthImgName = "depthImg" + std::to_string(n) + ".png";
cv::String maskImgName = "maskImg" + std::to_string(n) + ".png";
int count = 0;
bstopflag = true;
while (1)
{
//colorImgName = "colorImg" + std::to_string(n) + ".png";
//depthImgName = "depthImg" + std::to_string(n) + ".png";
//maskImgName = "maskImg" + std::to_string(n) + ".png";
if (WaitForSingleObject(depthEvent, 0) == 0)
{
getDepthImage(depthStreamHandle, depthImg, maskImg);
}
if (WaitForSingleObject(colorEvent, 0) == 0)
{
getColorImage(colorStreamHandle, colorImg);
count++;
}
if (WaitForSingleObject(skeletonEvent, 0) == 0 && n % CapFreq == 0)
{
getSkeletonImage(skeletonImg, colorImg);
}
//cv::imshow("colorImg", colorImg);
//cv::imshow("depthImg", depthImg);
//cv::imshow("maskImg", maskImg);
cv::imshow("skeletonImg", skeletonImg);
//if (n % CapFreq == 0)
//{
// cv::imwrite(colorImgName, colorImg);
// cv::imwrite(depthImgName, depthImg);
// cv::imwrite(maskImgName, maskImg);
//}
if (count == CapFreq * CapTimes)
{
mycout.close();
break;
}
n++;
cv::waitKey(5);
if (!bstopflag)
{
break;
}
}
NuiShutdown();
cv::destroyAllWindows();
return 0;
}
// Requests shutdown of the capture pipeline: stops the NUI runtime, signals
// cameraCapture()'s loop to exit on its next iteration, and closes all OpenCV
// display windows.
// @return always 0
int Kinect1Func::cameraShutDown() {
	// Ask the running capture loop to break out.
	bstopflag = false;
	// Stop the sensor runtime so no further frame events fire.
	NuiShutdown();
	cv::destroyAllWindows();
	return 0;
}
// Converts one 16-bit Kinect depth+player pixel into a display color.
// Each pixel packs two fields: the low 3 bits hold the player index of the
// person covering that pixel (0 = nobody), the high 13 bits hold the depth.
// Pixels with no player become gray; each player index 1-6 gets its own tint.
// (The runtime detects up to 6 people, though only 2 can be skeleton-tracked.)
BGR Kinect1Func::Depth2RGB(USHORT depthID)
{
	const USHORT realDepth = (depthID & 0xfff8) >> 3; // depth value, high 13 bits
	const USHORT player = depthID & 0x0007;           // player index, low 3 bits
	// Normalize the 13-bit depth down to 0-255 so it can be shown directly.
	const BYTE depth = (BYTE)(255 * realDepth / 0x1fff);

	BGR color_data;
	color_data.blue = color_data.green = color_data.red = 0;
	// Per-player channel weights expressed in quarters of the depth value:
	// {blue, green, red}. Index 0 (no player) is mid-gray (equal channels);
	// player index 7 is never produced by the runtime and stays black.
	static const int quarters[7][3] = {
		{ 2, 2, 2 }, // nobody: gray
		{ 0, 0, 4 }, // player 1: red
		{ 0, 4, 0 }, // player 2: green
		{ 4, 0, 0 }, // player 3: blue
		{ 4, 4, 1 }, // player 4: cyan-ish
		{ 4, 1, 4 }, // player 5: magenta-ish
		{ 1, 4, 4 }, // player 6: yellow-ish
	};
	if (player < 7)
	{
		color_data.blue  = (BYTE)(depth * quarters[player][0] / 4);
		color_data.green = (BYTE)(depth * quarters[player][1] / 4);
		color_data.red   = (BYTE)(depth * quarters[player][2] / 4);
	}
	return color_data;
}
// Fetches the next color frame from the stream and copies it into colorImg,
// converting the runtime's 4-byte BGRA pixels into OpenCV's 3-byte BGR.
// On a failed frame fetch the NUI runtime is shut down and the image is left
// untouched.
void Kinect1Func::getColorImage(HANDLE & colorStreamHandle, cv::Mat & colorImg)
{
	const NUI_IMAGE_FRAME * pFrame = NULL;
	if (FAILED(NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pFrame)))
	{
		std::cout << "Could not get color image" << std::endl;
		NuiShutdown();
		return;
	}
	INuiFrameTexture * texture = pFrame->pFrameTexture;
	NUI_LOCKED_RECT lockedRect;
	texture->LockRect(0, &lockedRect, NULL, 0);
	if (lockedRect.Pitch == 0)
	{
		std::cout << "捕获彩色图像出错" << std::endl;
	}
	else
	{
		for (int row = 0; row < colorImg.rows; ++row)
		{
			// Source rows are Pitch bytes apart; pixels are BGRA.
			const uchar * src = (const uchar *)lockedRect.pBits + row * lockedRect.Pitch;
			uchar * dst = colorImg.ptr<uchar>(row);
			for (int col = 0; col < colorImg.cols; ++col, src += 4, dst += 3)
			{
				// Copy B, G, R and drop the unused fourth byte.
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
			}
		}
	}
	texture->UnlockRect(0);
	NuiImageStreamReleaseFrame(colorStreamHandle, pFrame);
}
// Fetches the next depth frame and fills two outputs per pixel:
//  - depthImg (CV_16UC1): the raw 16-bit value (13-bit depth + 3-bit player index)
//  - mask     (CV_8UC1):  255 where a player covers the pixel, else 0
// Player presence is detected via Depth2RGB(): its output is gray (all
// channels equal) exactly when no player tint was applied.
// On a failed frame fetch the NUI runtime is shut down and outputs are untouched.
void Kinect1Func::getDepthImage(HANDLE & depthStreamHandle, cv::Mat & depthImg, cv::Mat & mask)
{
	const NUI_IMAGE_FRAME * pImageFrame = NULL;
	HRESULT hr = NuiImageStreamGetNextFrame(depthStreamHandle, 0, &pImageFrame);
	if (FAILED(hr))
	{
		std::cout << "Could not get depth image" << std::endl;
		NuiShutdown();
		return;
	}
	INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
	NUI_LOCKED_RECT LockedRect;
	pTexture->LockRect(0, &LockedRect, NULL, 0);
	if (LockedRect.Pitch != 0)
	{
		// FIX: the original allocated a CV_8UC3 "depthMask" Mat and filled it
		// on every frame but never read or returned it — pure dead work, removed.
		for (int i = 0; i < depthImg.rows; i++)
		{
			uchar * ptrM = mask.ptr<uchar>(i);
			ushort * ptrD = depthImg.ptr<ushort>(i);
			// Rows in the locked buffer are Pitch bytes apart.
			const USHORT * pBuffer = (const USHORT *)((uchar *)LockedRect.pBits + i * LockedRect.Pitch);
			for (int j = 0; j < depthImg.cols; j++)
			{
				// Keep the raw packed value; callers can decode it later.
				ptrD[j] = pBuffer[j];
				// Non-gray tint <=> a player index was encoded in the pixel.
				BGR rgb = Depth2RGB(pBuffer[j]);
				ptrM[j] = (rgb.blue != rgb.green || rgb.green != rgb.red) ? 255 : 0;
			}
		}
	}
	else
	{
		std::cout << "捕获深度图像出错" << std::endl;
	}
	pTexture->UnlockRect(0);
	NuiImageStreamReleaseFrame(depthStreamHandle, pImageFrame);
}
// Draws the bones of one skeleton onto img by connecting its 20 joint points.
// A joint at (0,0) means "not tracked"; any bone with an untracked endpoint
// is skipped.
// @param img       image to draw into (skeleton or color canvas)
// @param pointSet  array of NUI_SKELETON_POSITION_COUNT joint pixel coordinates
// @param which_one skeleton slot index 0-5, selecting the line color
//                  (out-of-range indices draw in black, as before)
void Kinect1Func::drawSkeleton(cv::Mat &img, cv::Point pointSet[], int which_one)
{
	// One distinct BGR color per tracked skeleton slot.
	static const cv::Scalar palette[6] = {
		cv::Scalar(255, 0, 0),
		cv::Scalar(0, 255, 0),
		cv::Scalar(0, 0, 255),
		cv::Scalar(255, 255, 0),
		cv::Scalar(255, 0, 255),
		cv::Scalar(0, 255, 255),
	};
	cv::Scalar color; // default-constructed scalar is all zeros (black)
	if (which_one >= 0 && which_one < 6)
		color = palette[which_one];

	// FIX: the original repeated the same guarded cv::line() call 19 times;
	// the bone topology is now data, drawn by a single loop.
	static const int bones[][2] = {
		// spine
		{ NUI_SKELETON_POSITION_HEAD,            NUI_SKELETON_POSITION_SHOULDER_CENTER },
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE },
		{ NUI_SKELETON_POSITION_SPINE,           NUI_SKELETON_POSITION_HIP_CENTER },
		// left arm
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT },
		{ NUI_SKELETON_POSITION_SHOULDER_LEFT,   NUI_SKELETON_POSITION_ELBOW_LEFT },
		{ NUI_SKELETON_POSITION_ELBOW_LEFT,      NUI_SKELETON_POSITION_WRIST_LEFT },
		{ NUI_SKELETON_POSITION_WRIST_LEFT,      NUI_SKELETON_POSITION_HAND_LEFT },
		// right arm
		{ NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT },
		{ NUI_SKELETON_POSITION_SHOULDER_RIGHT,  NUI_SKELETON_POSITION_ELBOW_RIGHT },
		{ NUI_SKELETON_POSITION_ELBOW_RIGHT,     NUI_SKELETON_POSITION_WRIST_RIGHT },
		{ NUI_SKELETON_POSITION_WRIST_RIGHT,     NUI_SKELETON_POSITION_HAND_RIGHT },
		// left leg
		{ NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_LEFT },
		{ NUI_SKELETON_POSITION_HIP_LEFT,        NUI_SKELETON_POSITION_KNEE_LEFT },
		{ NUI_SKELETON_POSITION_KNEE_LEFT,       NUI_SKELETON_POSITION_ANKLE_LEFT },
		{ NUI_SKELETON_POSITION_ANKLE_LEFT,      NUI_SKELETON_POSITION_FOOT_LEFT },
		// right leg
		{ NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_RIGHT },
		{ NUI_SKELETON_POSITION_HIP_RIGHT,       NUI_SKELETON_POSITION_KNEE_RIGHT },
		{ NUI_SKELETON_POSITION_KNEE_RIGHT,      NUI_SKELETON_POSITION_ANKLE_RIGHT },
		{ NUI_SKELETON_POSITION_ANKLE_RIGHT,     NUI_SKELETON_POSITION_FOOT_RIGHT },
	};
	for (const auto & b : bones)
	{
		const cv::Point & p0 = pointSet[b[0]];
		const cv::Point & p1 = pointSet[b[1]];
		// Skip bones whose endpoints were never tracked (left at the origin).
		if ((p0.x != 0 || p0.y != 0) && (p1.x != 0 || p1.y != 0))
			cv::line(img, p0, p1, color, 2);
	}
}
// Reads the next skeleton frame from the runtime, clears and redraws the
// skeleton canvas, marks each tracked joint on both the skeleton and color
// images, draws the bones, and appends every tracked skeleton's joint
// positions (scaled by 100, presumably meters -> cm) to the mycout stream.
void Kinect1Func::getSkeletonImage(cv::Mat & skeletonImg, cv::Mat & colorImg)
{
NUI_SKELETON_FRAME skeletonFrame = { 0 }; // skeleton frame container
bool foundSkeleton = false;
HRESULT hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
if (SUCCEEDED(hr))
{
// NUI_SKELETON_COUNT is the number of skeleton slots (i.e. trackable people).
for (int i = 0; i < NUI_SKELETON_COUNT; i++)
{
NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
// Kinect can detect up to 6 people but skeleton-tracks at most 2;
// check whether this slot is actually being tracked.
if (trackingState == NUI_SKELETON_TRACKED)
{
foundSkeleton = true;
}
}
}
// Nothing tracked this frame: keep the previous canvas and bail out early.
if (!foundSkeleton)
{
return;
}
// Smooth jitter in the joint positions (NULL = default smoothing parameters).
NuiTransformSmooth(&skeletonFrame, NULL);
skeletonImg.setTo(0); // clear the canvas before redrawing
for (int i = 0; i < NUI_SKELETON_COUNT; i++)
{
// A skeleton is considered valid only if it is tracked AND its shoulder
// center (neck position) joint is tracked.
if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED && skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
float fx, fy;
// Project every joint from skeleton space into depth-image coordinates,
// since the joints are marked on the depth-sized canvas.
// NUI_SKELETON_POSITION_COUNT is the joint count per skeleton (20).
for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
{
NuiTransformSkeletonToDepthImage(skeletonFrame.SkeletonData[i].SkeletonPositions[j], &fx, &fy);
// NOTE(review): parses as ((int)fx) * 2.0 — truncate first, then scale
// (presumably 320x240 depth coords up to the 640x480 canvas), which
// drops sub-pixel precision; confirm (int)(fx * 2.0) was not intended.
skeletonPoint[i][j].x = (int)fx*2.0;
skeletonPoint[i][j].y = (int)fy*2.0;
// mycout << skeletonFrame.SkeletonData[i].SkeletonPositions[j].x*100 <<" "<< skeletonFrame.SkeletonData[i].SkeletonPositions[j].y*100 << " " << skeletonFrame.SkeletonData[i].SkeletonPositions[j].z * 100 << endl;
mycout << skeletonFrame.SkeletonData[i].SkeletonPositions[j].x * 100 << " " << skeletonFrame.SkeletonData[i].SkeletonPositions[j].y * 100 << " " << skeletonFrame.SkeletonData[i].SkeletonPositions[j].z * 100 << std::endl;
//mycout << skeletonPoint[i][j].x << " " << skeletonPoint[i][j].y << " " << skeletonFrame.SkeletonData[i].SkeletonPositions[j].z * 100 << endl;
}
for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
{
if (skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
cv::circle(skeletonImg, skeletonPoint[i][j], 3, cv::Scalar(0, 255, 255), 1, 8, 0);
tracked[i] = true; // remember that this slot produced drawable joints
// Also mark the joint on the color image.
LONG color_x, color_y;
NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_640x480, 0,
skeletonPoint[i][j].x, skeletonPoint[i][j].y, 0, &color_x, &color_y);
// NOTE(review): halving the color coordinates here looks suspicious —
// colorImg is itself 640x480, so /2 maps all marks into the top-left
// quadrant; also parses as ((int)color_x) / 2.0. Verify against the
// observed overlay before changing.
colorPoint[i][j].x = (int)color_x/2.0;
colorPoint[i][j].y = (int)color_y/2.0;
cv::circle(colorImg, colorPoint[i][j], 4, cv::Scalar(0, 255, 255), 1, 8, 0);
}
}
drawSkeleton(skeletonImg, skeletonPoint[i], i);
drawSkeleton(colorImg, colorPoint[i], i);
}
}
}
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
C++
1
https://gitee.com/aalee/kinect-motion-capture-software_1.git
git@gitee.com:aalee/kinect-motion-capture-software_1.git
aalee
kinect-motion-capture-software_1
Kinect动作捕捉软件_1
master

搜索帮助

344bd9b3 5694891 D2dac590 5694891