Trying to use someone else's C++ program to capture and record Kinect data
The program is below; the few lines I added myself are marked with a comment (they were shown in red), but it seems I cannot get any data out of skeletonFrame?
#include "stdafx.h"
#include <windows.h>
#include <iostream>
#include <NuiApi.h>
#include <opencv2/opencv.hpp>
#include <fstream>
using namespace std;
using namespace cv;
// Draw the skeleton from the joint positions passed in
void drawSkeleton(Mat &image, CvPoint pointSet[], int whichone);
int main(int argc, char *argv[])
{
Mat skeletonImage;
skeletonImage.create(240, 320, CV_8UC3);
CvPoint skeletonPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT] = {cvPoint(0,0)};
bool tracked[NUI_SKELETON_COUNT] = {FALSE};
// 1. Initialize NUI; note the NUI_INITIALIZE_FLAG_USES_SKELETON flag here
HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON);
if (FAILED(hr))
{
cout<<"NuiInitialize failed"<<endl;
return hr;
}
// 2. Create the event handle for the skeleton signal
HANDLE skeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
// 3. Enable skeleton tracking
hr = NuiSkeletonTrackingEnable( skeletonEvent, 0 );
if( FAILED( hr ) )
{
cout<<"Could not open color image stream video"<<endl;
NuiShutdown();
return hr;
}
namedWindow("skeletonImage", CV_WINDOW_AUTOSIZE);
// 4. Start reading skeleton tracking data
while(1)
{
NUI_SKELETON_FRAME skeletonFrame = {0}; // the skeleton frame for this iteration
bool bFoundSkeleton = false;
// 4.1 Wait indefinitely for new data; WaitForSingleObject returns once a frame is ready
if (WaitForSingleObject(skeletonEvent, INFINITE)==0)
{
// 4.2 Fetch the frame from the stream opened above; the data is read into skeletonFrame
hr = NuiSkeletonGetNextFrame( 0, &skeletonFrame);
// >>> the three lines below are the ones I added myself (shown in red in the original post)
ofstream out("data.txt",ios::out);
out<<skeletonFrame<<endl;
out.close();
if (SUCCEEDED(hr))
{
// NUI_SKELETON_COUNT is the number of skeleton slots (i.e. the number of people that can be detected)
for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
{
NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
// 4.3 Kinect can detect up to six people but tracks full skeletons for only two;
// check whether each slot (which may be empty rather than a person) is actually being tracked
if( trackingState == NUI_SKELETON_TRACKED )
{
bFoundSkeleton = true;
}
}
}
if( !bFoundSkeleton )
{
continue;
}
// 4.4 Smooth the skeleton frame to remove jitter
NuiTransformSmooth(&skeletonFrame, NULL);
skeletonImage.setTo(0);
for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
{
// Show the skeleton only if it is tracked and the shoulder-center (neck) joint is at least inferred.
if( skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&
skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
{
float fx, fy;
// Get the coordinates of every tracked joint and convert them to depth-image coordinates,
// since the joints are drawn on a depth-sized image.
// NUI_SKELETON_POSITION_COUNT is the number of joints per skeleton (20).
for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
{
NuiTransformSkeletonToDepthImage(skeletonFrame.SkeletonData[i].SkeletonPositions[j], &fx, &fy );
skeletonPoint[i][j].x = (int)fx;
skeletonPoint[i][j].y = (int)fy;
}
for (int j=0; j<NUI_SKELETON_POSITION_COUNT ; j++)
{
if (skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED) // a joint has three states: not tracked, tracked, or inferred from tracked joints
{
circle(skeletonImage, skeletonPoint[i][j], 3, cvScalar(0, 255, 255), 1, 8, 0);
tracked[i] = TRUE;
}
}
drawSkeleton(skeletonImage, skeletonPoint[i], i);
}
}
imshow("skeletonImage", skeletonImage); //显示图像
}
else
{
cout<<"Buffer length of received texture is bogus\r\n"<<endl;
}
if (cvWaitKey(20) == 27)
break;
}
// 5. Shut down the NUI connection
NuiShutdown();
return 0;
}
// Draw the skeleton from the joint positions passed in
void drawSkeleton(Mat &image, CvPoint pointSet[], int whichone)
{
CvScalar color;
switch(whichone) // different color for each tracked person
{
case 0:
color = cvScalar(255);
break;
case 1:
color = cvScalar(0,255);
break;
case 2:
color = cvScalar(0, 0, 255);
break;
case 3:
color = cvScalar(255, 255, 0);
break;
case 4:
color = cvScalar(255, 0, 255);
break;
case 5:
color = cvScalar(0, 255, 255);
break;
}
// torso
if((pointSet[NUI_SKELETON_POSITION_HEAD].x!=0 || pointSet[NUI_SKELETON_POSITION_HEAD].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_HEAD], pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], color, 2);
if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_SPINE].x!=0 || pointSet[NUI_SKELETON_POSITION_SPINE].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SPINE], color, 2);
if((pointSet[NUI_SKELETON_POSITION_SPINE].x!=0 || pointSet[NUI_SKELETON_POSITION_SPINE].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_SPINE], pointSet[NUI_SKELETON_POSITION_HIP_CENTER], color, 2);
// left upper limb
if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_HAND_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HAND_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], pointSet[NUI_SKELETON_POSITION_HAND_LEFT], color, 2);
// right upper limb
if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT], pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT], pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_HAND_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HAND_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT], pointSet[NUI_SKELETON_POSITION_HAND_RIGHT], color, 2);
// left lower limb
if((pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_HIP_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_HIP_CENTER], pointSet[NUI_SKELETON_POSITION_HIP_LEFT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_HIP_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_LEFT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_HIP_LEFT], pointSet[NUI_SKELETON_POSITION_KNEE_LEFT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_KNEE_LEFT], pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_FOOT_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_FOOT_LEFT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT], pointSet[NUI_SKELETON_POSITION_FOOT_LEFT], color, 2);
// right lower limb
if((pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_HIP_CENTER], pointSet[NUI_SKELETON_POSITION_HIP_RIGHT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_HIP_RIGHT], pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT], pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT], color, 2);
if((pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].y!=0) &&
(pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT].y!=0))
line(image, pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT], pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT], color, 2);
}
I changed out<<skeletonFrame<<endl; to out<<&skeletonFrame<<endl;, but it still only displays the image; data.txt is never created and no data gets written.
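A note on why the added lines produce nothing useful: NUI_SKELETON_FRAME is a plain struct with no operator<< overload, so out<<skeletonFrame cannot serialize the frame, and out<<&skeletonFrame only writes the struct's address. The individual fields have to be written out one by one. Below is a minimal, untested sketch of a helper (the name dumpSkeletonFrame is just an illustration, not part of the SDK) that appends every tracked skeleton's joint positions to a text file, using only the SDK 1.x fields already referenced in the listing (SkeletonData[i].eTrackingState, SkeletonPositions[j], liTimeStamp). It assumes the ofstream is opened once before the while(1) loop rather than re-created and truncated every frame, and that the helper is called right after the SUCCEEDED(hr) check.

#include <fstream>
#include <NuiApi.h>

// Append the joint positions of every tracked skeleton in `frame` to `out`.
// Sketch only: open `out` once before the capture loop, e.g. std::ofstream out("data.txt");
void dumpSkeletonFrame(std::ofstream &out, const NUI_SKELETON_FRAME &frame)
{
    for (int i = 0; i < NUI_SKELETON_COUNT; i++)
    {
        const NUI_SKELETON_DATA &data = frame.SkeletonData[i];
        if (data.eTrackingState != NUI_SKELETON_TRACKED)
            continue; // skip empty or position-only slots

        out << "skeleton " << i << " time " << frame.liTimeStamp.QuadPart << "\n";
        for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
        {
            const Vector4 &p = data.SkeletonPositions[j]; // skeleton space, in meters
            out << j << " " << p.x << " " << p.y << " " << p.z << "\n";
        }
    }
    out.flush(); // make sure data.txt has content even if the program is closed with Esc
}

Called as dumpSkeletonFrame(out, skeletonFrame) inside the if (SUCCEEDED(hr)) block, this should write one block of 20 joint lines per tracked skeleton per frame. If data.txt still does not appear at all, it is worth checking the process working directory: when launched from Visual Studio the file is usually created in the project directory, not next to the .exe.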