C source (uses C++ headers; the output below shows it was compiled as C), pasted on Jan 16:
|
#include <windows.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <ctype.h>
#include <iostream>
#include <GL/gl.h>
#include <GL/glut.h>
#include <cv.h>
#include <highgui.h>
#include <AR/gsub.h>
#include <AR/video.h>
#include <AR/param.h>
#include <AR/ar.h>
using namespace std;
// Global variables --------------------------------
IplImage* opencvImage;   // per-frame BGRA copy of the ARToolKit camera image
CvCapture* capture;      // OpenCV capture handle (opened in openCV_init, released in cleanup)
// Camera configuration file for the ARToolKit video device
char *vconf = "./Data\\WDM_camera_flipV.xml";
int xsize, ysize;        // camera frame dimensions, filled in by ARToolKit_init
int thresh = 100;        // NOTE(review): unused in this file — confirm before removing
int count = 0;           // NOTE(review): unused in this file — confirm before removing
char *cparam_name = "./Data/camera_para.dat";   // camera intrinsics file
ARParam cparam;          // intrinsics rescaled to the actual frame size
// Function prototypes -----------------------------
static void ARToolKit_init(void);
static void openCV_init(int argc, char** argv);
static void cleanup(void);
static void keyEvent(unsigned char key, int x, int y);
static void mainLoop(void);
//==================================================
// メインループ
//==================================================
void mainLoop(void)
{
ARUint8 *dataPtr;
IplImage* img;
IplImage* img2;
IplImage* img3;
IplImage* imgNIN;
IplImage* imgNIN2;
IplImage* imgBCO;
IplImage* imgSBN;
int x, y;
uchar rgbTmp[3],tmp[3],tmpp[3],tmppp[3];
//カメラ画像の取得
if( (dataPtr = (ARUint8 *)arVideoGetImage()) == NULL ) {
arUtilSleep(2);
return;
}
//ARToolKit→openCV
opencvImage = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 4 );
memcpy(opencvImage->imageData, dataPtr, opencvImage->imageSize);
//BGRA→BGRに変換
img = cvCreateImage(cvSize(xsize, ysize), IPL_DEPTH_8U, 3);
img2 = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 3);
img3 = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 3);
imgNIN = cvCreateImage(cvSize(img->width,img->height),IPL_DEPTH_8U, 3);
imgNIN2 = cvCreateImage(cvSize(img->width,img->height),IPL_DEPTH_8U, 3);
imgBCO = cvCreateImage(cvSize(img->width,img->height),IPL_DEPTH_8U, 3);
imgSBN = cvCreateImage(cvSize(img->width,img->height),IPL_DEPTH_8U, 3);
//画像配列の確保
for(y=0; y < opencvImage->height; y++){
for(x=0; x < opencvImage->width; x++){
rgbTmp[0] = opencvImage->imageData[opencvImage->widthStep * y + x*4]; // B
rgbTmp[1] = opencvImage->imageData[opencvImage->widthStep * y + x*4 + 1]; // G
rgbTmp[2] = opencvImage->imageData[opencvImage->widthStep * y + x*4 + 2]; // R
img->imageData[img->widthStep * y + x*3] = rgbTmp[0];
img->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
img->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
img2->imageData[img->widthStep * y + x*3] = rgbTmp[0];
img2->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
img2->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
img3->imageData[img->widthStep * y + x*3] = rgbTmp[0];
img3->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
img3->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
imgNIN->imageData[img->widthStep * y + x*3] = rgbTmp[0];
imgNIN->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
imgNIN->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
imgNIN2->imageData[img->widthStep * y + x*3] = rgbTmp[0];
imgNIN2->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
imgNIN2->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
imgBCO->imageData[img->widthStep * y + x*3] = rgbTmp[0];
imgBCO->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
imgBCO->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
imgSBN->imageData[img->widthStep * y + x*3] = rgbTmp[0];
imgSBN->imageData[img->widthStep * y + x*3 + 1] = rgbTmp[1];
imgSBN->imageData[img->widthStep * y + x*3 + 2] = rgbTmp[2];
}
}
//元画像表示(ARToolKit)
argDrawMode2D();
argDispImage( dataPtr, 0,0 );
argSwapBuffers();
//処理------------------------------------------------------------------
// ケーブルの認識処理
for(y=0; y < img->height; y++){
for(x=0; x < img->width; x++){
tmp[0] = img->imageData[img->widthStep * y + x*3]; // B
tmp[1] = img->imageData[img->widthStep * y + x*3 + 1]; // G
tmp[2] = img->imageData[img->widthStep * y + x*3 + 2]; // R
// 閾値処理
if(tmp[2]<=150 && tmp[1]<=150 && tmp[0]<=180){
imgNIN->imageData[img->widthStep * y + x*3] = 255;
imgNIN->imageData[img->widthStep * y + x*3 + 1] = 255;
imgNIN->imageData[img->widthStep * y + x*3 + 2] = 255;
}
}
}
// ケーブルの膨張処理
int dilations = 1; //膨張回数
if( dilations > 0 )
cvDilate( imgNIN,imgBCO,0,dilations );
int dilations2 = 1; //膨張回数
if( dilations2 > 0 )
cvDilate( imgNIN,imgNIN2,0,dilations2 );
// 差分画像を作成
for(y=0; y < img->height; y++){
for(x=0; x < img->width; x++){
imgSBN->imageData[img->widthStep * y + x*3] = (imgBCO->imageData[img->widthStep * y + (x)*3]) - (imgNIN->imageData[img->widthStep * y + (x)*3]);
imgSBN->imageData[img->widthStep * y + x*3 + 1] = (imgBCO->imageData[img->widthStep * y + (x)*3 + 1]) - (imgNIN->imageData[img->widthStep * y + (x)*3 + 1]);
imgSBN->imageData[img->widthStep * y + x*3 + 2] = (imgBCO->imageData[img->widthStep * y + (x)*3 + 2]) - (imgNIN->imageData[img->widthStep * y + (x)*3 + 2]);
}
}
// ケーブルの除去処理
for(y=0; y < img->height; y++){
for(x=0; x < img->width; x++){
tmpp[0] = imgNIN->imageData[img->widthStep * y + x*3]; // B
tmpp[1] = imgNIN->imageData[img->widthStep * y + x*3 + 1]; // G
tmpp[2] = imgNIN->imageData[img->widthStep * y + x*3 + 2]; // R
tmppp[0] = imgSBN->imageData[img->widthStep * y + x*3]; // B
tmppp[1] = imgSBN->imageData[img->widthStep * y + x*3 + 1]; // G
tmppp[2] = imgSBN->imageData[img->widthStep * y + x*3 + 2]; // R
// 閾値処理
if (tmpp[2]==255 && tmpp[1]==255 && tmpp[0]==255) {
if (tmppp[2]==0 && tmppp[1]==0 && tmppp[0]==0) {
img2->imageData[img->widthStep * (y) + (x)*3] = img->imageData[img->widthStep * (y) + (x-30)*3];
img2->imageData[img->widthStep * (y) + (x)*3 + 1] = img->imageData[img->widthStep * (y) + (x-30)*3 + 1];
img2->imageData[img->widthStep * (y) + (x)*3 + 2] = img->imageData[img->widthStep * (y) + (x-30)*3 + 2];
}
else {
img2->imageData[img->widthStep * (y) + (x)*3] = img->imageData[img->widthStep * (y) + (x+30)*3];
img2->imageData[img->widthStep * (y) + (x)*3 + 1] = img->imageData[img->widthStep * (y) + (x+30)*3 + 1];
img2->imageData[img->widthStep * (y) + (x)*3 + 2] = img->imageData[img->widthStep * (y) + (x+30)*3 + 2];
}
}
else {
img2->imageData[img->widthStep * (y) + (x)*3] = img->imageData[img->widthStep * (y) + (x)*3];
img2->imageData[img->widthStep * (y) + (x)*3 + 1] = img->imageData[img->widthStep * (y) + (x)*3 + 1];
img2->imageData[img->widthStep * (y) + (x)*3 + 2] = img->imageData[img->widthStep * (y) + (x)*3 + 2];
}
}
}
for(y=0; y < img->height; y++){
for(x=0; x < img->width; x++){
tmpp[0] = img2->imageData[img->widthStep * y + x*3]; // B
tmpp[1] = img2->imageData[img->widthStep * y + x*3 + 1]; // G
tmpp[2] = img2->imageData[img->widthStep * y + x*3 + 2]; // R
// 背景の閾値処理
if( !(tmpp[2]<=30 && tmpp[1]<=30 && tmpp[0]<=25) ){
if(x-30 > 0 && y-30 > 0){
img3->imageData[img->widthStep * y + x*3] = img2->imageData[img->widthStep * (y-30) + (x-30)*3];
img3->imageData[img->widthStep * y + x*3 + 1] = img2->imageData[img->widthStep * (y-30) + (x-30)*3 + 1];
img3->imageData[img->widthStep * y + x*3 + 2] = img2->imageData[img->widthStep * (y-30) + (x-30)*3 + 2];
}
else if (x-30 < 0 && y-30 > 0){
img3->imageData[img->widthStep * y + x*3] = img2->imageData[img->widthStep * (y-30) + (x+30)*3];
img3->imageData[img->widthStep * y + x*3 + 1] = img2->imageData[img->widthStep * (y-30) + (x+30)*3 + 1];
img3->imageData[img->widthStep * y + x*3 + 2] = img2->imageData[img->widthStep * (y-30) + (x+30)*3 + 2];
}
else if (x-30 > 0 && y-30 < 0){
img3->imageData[img->widthStep * y + x*3] = img2->imageData[img->widthStep * (y+30) + (x-30)*3];
img3->imageData[img->widthStep * y + x*3 + 1] = img2->imageData[img->widthStep * (y+30) + (x-30)*3 + 1];
img3->imageData[img->widthStep * y + x*3 + 2] = img2->imageData[img->widthStep * (y+30) + (x-30)*3 + 2];
}
else if (x-30 < 0 && y-30 < 0){
img3->imageData[img->widthStep * y + x*3] = img2->imageData[img->widthStep * (y+30) + (x+30)*3];
img3->imageData[img->widthStep * y + x*3 + 1] = img2->imageData[img->widthStep * (y+30) + (x+30)*3 + 1];
img3->imageData[img->widthStep * y + x*3 + 2] = img2->imageData[img->widthStep * (y+30) + (x+30)*3 + 2];
}
}
}
}
//画像表示(OpenCV)
cvShowImage("NormalImage", img);
cvShowImage("NormalImage2", img2);
cvShowImage("NormalImage3", img3);
cvShowImage("Chromakey", imgNIN);
cvShowImage("Chromakey2", imgNIN2);
cvShowImage("Dilate", imgBCO);
cvShowImage("差分", imgSBN);
if( cvWaitKey(10) >= 0 )
return;
//メモリ解放(OpenCV)
cvReleaseCapture( &capture );
cvReleaseImage( &opencvImage );
cvReleaseImage( &img );
cvReleaseImage( &img2);
cvReleaseImage( &img3);
cvReleaseImage( &imgNIN );
cvReleaseImage( &imgNIN2 );
cvReleaseImage( &imgBCO );
cvReleaseImage( &imgSBN );
}
//==================================================
// Keyboard handler: 'q' or ESC shuts the program down;
// every other key (including the reserved 'f') is ignored.
//==================================================
static void keyEvent( unsigned char key, int x, int y)
{
    // Guard clause instead of a switch: only two keys matter.
    if (key != 'q' && key != 0x1b)
        return;
    cleanup();
    exit(0);
}
//==================================================
// ARToolKit initialization: open the video device, load
// and rescale the camera intrinsics, create the GL window.
//==================================================
static void ARToolKit_init()
{
    ARParam wparam;
    // Open the capture device described by vconf and query the
    // frame size so every buffer matches the camera.
    if (arVideoOpen(vconf) < 0 || arVideoInqSize(&xsize, &ysize) < 0)
        exit(0);
    printf("Image size (x,y) = (%d,%d)\n", xsize, ysize);
    // Load the intrinsics file and scale it to the actual frame size.
    if (arParamLoad(cparam_name, 1, &wparam) < 0) {
        printf("Camera parameter load error !!\n");
        exit(0);
    }
    arParamChangeSize(&wparam, xsize, ysize, &cparam);
    arInitCparam(&cparam);
    printf("*** Camera Parameter ***\n");
    arParamDisp(&cparam);
    // Create the ARToolKit/OpenGL window at 1:1 zoom.
    argInit(&cparam, 1.0, 0, 0, 0, 0);
}
//==================================================
// OpenCV initialization: open a capture source and
// create one display window per processing stage.
//==================================================
static void openCV_init(int argc, char** argv)
{
    capture = 0;
    // No argument, or a single digit: open that camera index
    // (default 0).  Any other single argument is an AVI path.
    if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
        capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 );
    else if( argc == 2 )
        capture = cvCaptureFromAVI( argv[1] );
    // BUG FIX: the original repeated cvCaptureFromAVI(argv[1]) here
    // unconditionally, overwriting the camera capture just opened and
    // passing argv[1] (NULL when argc == 1) to cvCaptureFromAVI.
    if( !capture )
    {
        // Best-effort: the ARToolKit video pipeline still runs without it.
        fprintf(stderr,"Could not initialize capturing...\n");
    }
    // Display windows for each stage of the pipeline.
    cvNamedWindow("NormalImage", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("NormalImage2", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("NormalImage3", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Chromakey", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Chromakey2", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Dilate", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("差分", CV_WINDOW_AUTOSIZE);
}
//==================================================
// Shutdown: release the capture, tear down every OpenCV
// window, then stop and close the ARToolKit side.
//==================================================
static void cleanup(void)
{
    // Same window names as created in openCV_init.
    static const char *windows[] = {
        "NormalImage", "NormalImage2", "NormalImage3",
        "Chromakey", "Chromakey2", "Dilate", "差分"
    };
    int i;
    cvReleaseCapture(&capture);
    for (i = 0; i < (int)(sizeof(windows) / sizeof(windows[0])); i++)
        cvDestroyWindow(windows[i]);
    arVideoCapStop();
    arVideoClose();
    argCleanup();
}
//==================================================
// Main entry point
//==================================================
int main(int argc, char** argv)
{
    // GLUT must be initialized before ARToolKit_init, whose argInit
    // call creates the rendering window.
    glutInit(&argc, argv);
    ARToolKit_init();
    openCV_init(argc, argv);
    // Start frame capture, then hand control to ARToolKit's GLUT loop;
    // mainLoop runs once per frame until keyEvent exits on 'q'/ESC.
    arVideoCapStart();
    argMainLoop( NULL, keyEvent, mainLoop );
    return 0;
}
|
Output:
|
Line 20: error: windows.h: No such file or directory
Line 19: error: iostream: No such file or directory
Line 18: error: GL/gl.h: No such file or directory
Line 20: error: GL/glut.h: No such file or directory
Line 15: error: cv.h: No such file or directory
Line 20: error: highgui.h: No such file or directory
Line 20: error: AR/gsub.h: No such file or directory
Line 21: error: AR/video.h: No such file or directory
Line 21: error: AR/param.h: No such file or directory
Line 18: error: AR/ar.h: No such file or directory
Line 20: error: expected '=', ',', ';', 'asm' or '__attribute__' before 'namespace'
Line 23: error: expected '=', ',', ';', 'asm' or '__attribute__' before '*' token
Line 24: error: expected '=', ',', ';', 'asm' or '__attribute__' before '*' token
Line 33: error: expected '=', ',', ';', 'asm' or '__attribute__' before 'cparam'
In function 'mainLoop':
Line 47: error: 'ARUint8' undeclared (first use in this function)
Line 47: error: (Each undeclared identifier is reported only once
Line 47: error: for each function it appears in.)
Line 47: error: 'dataPtr' undeclared (first use in this function)
Line 48: error: 'IplImage' undeclared (first use in this function)
Line 48: error: 'img' undeclared (first use in this function)
Line 49: error: 'img2' undeclared (first use in this function)
Line 50: error: 'img3' undeclared (first use in this function)
Line 51: error: 'imgNIN' undeclared (first use in this function)
Line 52: error: 'imgNIN2' undeclared (first use in this function)
Line 53: error: 'imgBCO' undeclared (first use in this function)
Line 54: error: 'imgSBN' undeclared (first use in this function)
Line 58: error: 'uchar' undeclared (first use in this function)
Line 58: error: expected ';' before 'rgbTmp'
Line 61: error: expected expression before ')' token
Line 67: error: 'opencvImage' undeclared (first use in this function)
Line 67: error: 'IPL_DEPTH_8U' undeclared (first use in this function)
Line 68: warning: incompatible implicit declaration of built-in function 'memcpy'
Line 82: error: 'rgbTmp' undeclared (first use in this function)
Line 119: error: 'tmp' undeclared (first use in this function)
Line 153: error: 'tmpp' undeclared (first use in this function)
Line 157: error: 'tmppp' undeclared (first use in this function)
Line 229: error: 'capture' undeclared (first use in this function)
In function 'ARToolKit_init':
Line 268: error: 'ARParam' undeclared (first use in this function)
Line 268: error: expected ';' before 'wparam'
Line 277: error: 'wparam' undeclared (first use in this function)
Line 281: error: 'cparam' undeclared (first use in this function)
In function 'openCV_init':
Line 296: error: 'capture' undeclared (first use in this function)
Line 298: warning: incompatible implicit declaration of built-in function 'strlen'
Line 311: error: 'CV_WINDOW_AUTOSIZE' undeclared (first use in this function)
In function 'cleanup':
Line 325: error: 'capture' undeclared (first use in this function)
|
|