[ create a new paste ] login | about

Link: http://codepad.org/zBmlQlX8    [ raw code | output | fork ]

C++, pasted on Mar 28:
/* Fixed-width integer typedefs for old MSVC (pre-VS2010), which ships no
 * <stdint.h>.  The guard must test defined(_MSC_VER): on any other compiler
 * the macro is undefined, the preprocessor treats it as 0, "0 < 1600" is
 * true, and the MSVC-only __intN keywords break the build (this is exactly
 * the "'__int64' does not name a type" error in the pasted output).
 * Non-MSVC (and modern MSVC) compilers get the standard <stdint.h> instead. */
#if defined(_MSC_VER) && _MSC_VER < 1600
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int8 uint8_t;
#else
#include <stdint.h>
#endif

#include <highgui.h>
#include <cv.h>

#include <OpenNI.h>

#include <NiTE.h>

#include <iostream>

int main(void)
{
	openni::OpenNI::initialize();
	nite::NiTE::initialize();

	openni::Device device;
	device.open(openni::ANY_DEVICE);
	device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);

	nite::UserTracker user_tracker;
	user_tracker.create();
	user_tracker.setSkeletonSmoothingFactor(0.1f);

	nite::HandTracker hand_tracker;
	hand_tracker.create();
	hand_tracker.startGestureDetection(nite::GESTURE_CLICK);
	hand_tracker.startGestureDetection(nite::GESTURE_WAVE);
	hand_tracker.startGestureDetection(nite::GESTURE_HAND_RAISE);

	openni::VideoMode color_mode;
	color_mode.setFps(30);
	color_mode.setResolution(640,480);
	color_mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);

	openni::VideoStream color_stream;
	color_stream.setVideoMode(color_mode);
	color_stream.create(device,openni::SENSOR_COLOR);
	color_stream.start();

	openni::VideoMode depth_mode;
	depth_mode.setFps(30);
	depth_mode.setResolution(640,480);
	depth_mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_100_UM);

	openni::VideoStream depth_stream;
	depth_stream.setVideoMode(depth_mode);
	depth_stream.create(device,openni::SENSOR_DEPTH);
	depth_stream.start();
	int max_depth = depth_stream.getMaxPixelValue();

	cvNamedWindow("Color",CV_WINDOW_AUTOSIZE);
	cvNamedWindow("Depth",CV_WINDOW_AUTOSIZE);
	cvNamedWindow("Canny",CV_WINDOW_AUTOSIZE);

	openni::VideoFrameRef color_frame;
	openni::VideoFrameRef depth_frame;
	nite::UserTrackerFrameRef user_tracker_frame;
	nite::HandTrackerFrameRef hand_tracker_frame;

	while(true){
		color_stream.readFrame(&color_frame);
		depth_stream.readFrame(&depth_frame);
		user_tracker.readFrame(&user_tracker_frame);
		hand_tracker.readFrame(&hand_tracker_frame);

		cv::Mat RGBMat(color_frame.getHeight(),color_frame.getWidth(),CV_8UC3,(void*)color_frame.getData());
		IplImage color_image(RGBMat);
		cvCvtColor(&color_image,&color_image,CV_RGB2BGR);

		const nite::Array<nite::GestureData> &gestures = hand_tracker_frame.getGestures();
		CvPoint2D32f position;
		
		for(int i = 0;i < gestures.getSize();++i){
				const nite::Point3f &pos = gestures[i].getCurrentPosition();
				nite::HandId hand_id;
				hand_tracker.startHandTracking(pos,&hand_id);
		}

		const nite::Array<nite::HandData> &hands = hand_tracker_frame.getHands();
		for(int i = 0;i < hands.getSize();++i){
			const nite::HandData hand = hands[i];

			if(hand.isTracking()){
				const nite::Point3f &pos = hand.getPosition();
				//std::cout << pos.x << " , " << pos.y << " , " << pos.z << "\t";

				hand_tracker.convertHandCoordinatesToDepth(pos.x,pos.y,pos.z,&position.x,&position.y);
				cvCircle(&color_image,cvPoint(position.x,position.y),3,CV_RGB(255,0,0),3,CV_AA);
			}

			if(hand.isTouchingFov()){
				//std::cout << "Touching Fov";
			}

			//std::cout << std::endl;
		}

		const nite::Array<nite::UserData> &users = user_tracker_frame.getUsers();
		for(int i = 0;i < users.getSize();++i){
			const nite::UserData &user = users[i];

			if(user.isNew()){
				std::cout << "New User: " << user.getId() << std::endl;
				user_tracker.startSkeletonTracking(user.getId());
			}
			else if(user.isLost()){
				std::cout << "Lost User: " << user.getId() << std::endl;
			}

			if(user.isVisible()){
				const nite::Skeleton &skeleton = user.getSkeleton();
				nite::SkeletonJoint joints[15];

				if(skeleton.getState() == nite::SKELETON_TRACKED){
					joints[0] = skeleton.getJoint(nite::JOINT_HEAD);
					joints[1] = skeleton.getJoint(nite::JOINT_NECK);
					joints[2] = skeleton.getJoint(nite::JOINT_LEFT_SHOULDER);
					joints[3] = skeleton.getJoint(nite::JOINT_RIGHT_SHOULDER);
					joints[4] = skeleton.getJoint(nite::JOINT_LEFT_ELBOW);
					joints[5] = skeleton.getJoint(nite::JOINT_RIGHT_ELBOW);
					joints[6] = skeleton.getJoint(nite::JOINT_LEFT_HAND);
					joints[7] = skeleton.getJoint(nite::JOINT_RIGHT_HAND);
					joints[8] = skeleton.getJoint(nite::JOINT_TORSO);
					joints[9] = skeleton.getJoint(nite::JOINT_LEFT_HIP);
					joints[10] = skeleton.getJoint(nite::JOINT_RIGHT_HIP);
					joints[11] = skeleton.getJoint(nite::JOINT_LEFT_KNEE);
					joints[12] = skeleton.getJoint(nite::JOINT_RIGHT_KNEE);
					joints[13] = skeleton.getJoint(nite::JOINT_LEFT_FOOT);
					joints[14] = skeleton.getJoint(nite::JOINT_RIGHT_FOOT);
				}

				CvPoint2D32f point[15];
				for(int i = 0;i < 15;++i){
					const nite::Point3f &pos = joints[i].getPosition();
					user_tracker.convertJointCoordinatesToDepth(pos.x,pos.y,pos.z,&(point[i].x),&(point[i].y));
				}

				cvLine(&color_image,cvPoint(point[0].x,point[0].y),cvPoint(point[1].x,point[1].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[1].x,point[1].y),cvPoint(point[2].x,point[2].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[1].x,point[1].y),cvPoint(point[3].x,point[3].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[2].x,point[2].y),cvPoint(point[4].x,point[4].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[3].x,point[3].y),cvPoint(point[5].x,point[5].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[4].x,point[4].y),cvPoint(point[6].x,point[6].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[5].x,point[5].y),cvPoint(point[7].x,point[7].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[1].x,point[1].y),cvPoint(point[8].x,point[8].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[8].x,point[8].y),cvPoint(point[9].x,point[9].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[8].x,point[8].y),cvPoint(point[10].x,point[10].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[9].x,point[9].y),cvPoint(point[11].x,point[11].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[10].x,point[10].y),cvPoint(point[12].x,point[12].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[11].x,point[11].y),cvPoint(point[13].x,point[13].y),CV_RGB(255,0,0),3);
				cvLine(&color_image,cvPoint(point[12].x,point[12].y),cvPoint(point[14].x,point[14].y),CV_RGB(255,0,0),3);

				for(int i = 0;i < 15;++i){
					if(joints[i].getPositionConfidence() > 0.5f){
						cvCircle(&color_image,cvPoint(point[i].x,point[i].y),3,CV_RGB(0,255,0),2);
					}
					else{
						cvCircle(&color_image,cvPoint(point[i].x,point[i].y),3,CV_RGB(0,0,255),2);
					}
				}
			}
		}

		cvShowImage("Color",&color_image);

		cv::Mat DepthMat(depth_frame.getHeight(),depth_frame.getWidth(),CV_16UC1,(void*)depth_frame.getData());
		DepthMat.convertTo(DepthMat,CV_8UC1,255.0f / max_depth);
		IplImage depth_image(DepthMat);
		cvShowImage("Depth",&depth_image);

		cvCanny(&depth_image,&depth_image,50,100);
		cvShowImage("Canny",&depth_image);

		if(cvWaitKey(1) == 'q'){
			break;
		}
	}

	cvDestroyWindow("Color");
	cvDestroyWindow("Depth");
	cvDestroyWindow("Canny");

	hand_tracker.destroy();
	color_stream.destroy();
	depth_stream.destroy();
	device.close();
	openni::OpenNI::shutdown();
	nite::NiTE::shutdown();

	return 0;
}


Output:
1
2
3
4
5
6
Line 20: error: highgui.h: No such file or directory
Line 15: error: cv.h: No such file or directory
Line 19: error: OpenNI.h: No such file or directory
Line 17: error: NiTE.h: No such file or directory
Line 2: error: '__int64' does not name a type
compilation terminated due to -Wfatal-errors.


Create a new paste based on this one


Comments: