三次元ポイントクラウドの表示
概要
Kinectから得られたRGB,深度データを用いてポイントクラウドを三次元座標上に表示します.
サンプルコード
※OpenNI.lib,glut32.libをリンクする必要あり
openniDevice.h <デバイスの初期化とデータの取得>
#ifndef _PR_DEVICE
#define _PR_DEVICE

#include "XnOS.h"
#include "XnCppWrapper.h"

using namespace xn;

// Defines
#define SAMPLE_XML_PATH "../data/SamplesConfig.xml"

// Thin wrapper around an OpenNI context owning the depth, RGB-image and
// user-label generators of a single Kinect-style sensor.
// Typical call order:
//   OpenDevice() -> InqSize() -> { GetData() + createDrawArrays() }* -> CloseDevice()
class OpenNIDevice {
private:
Context context;        // OpenNI runtime context (owns all generators)
DepthGenerator depth;   // depth stream
ImageGenerator image;   // RGB stream
UserGenerator user;     // per-pixel user labels
DepthMetaData depthMD;  // frame holder for depth data
ImageMetaData imageMD;  // frame holder for image data
SceneMetaData sceneMD;  // frame holder for user-label data

int width, height;      // frame resolution, filled in by OpenDevice()

public:
// tan(FOV/2)*2 projection factors computed in OpenDevice(); used to
// un-project depth pixels into camera-space X/Y.
double fXToZ, fYToZ;

OpenNIDevice();
~OpenNIDevice();
int OpenDevice();       // initialize the device; 0 on success, -1 on failure
int InqSize(int *x, int *y);  // report frame width/height
void GetData(unsigned char *imageBuffer, unsigned short *depthRawBuffer, unsigned char *userBuffer);
unsigned int createDrawArrays(unsigned char *imageBuffer, unsigned short *depthRawBuffer, unsigned char *userBuffer, unsigned char *vertexArray, int mode);

void KinectDepthToWorld(float &x, float &y, float &z);
void KinectDepthToWorldArray(int num, XnPoint3D* inArray, XnPoint3D* outArray);

int CloseDevice(void);

};

#endif // _PR_DEVICE
42
openniDevice.cpp
#define NOMINMAX

#include "openniDevice.h"
#include <fstream>
#include <string>

// Globals
// Palette used to tint pixels of tracked users: one RGB triple per user
// label 1..5 (createDrawArrays indexes this table with label - 1).
unsigned char colors[5][3] = {
    {255, 0, 0},
    {0, 255, 0},
    {0, 0, 255},
    {127, 127, 0},
    {127, 0, 127}
};
16
// Constructor: intentionally empty — all setup happens in OpenDevice().
OpenNIDevice::OpenNIDevice() {
}

// Destructor: intentionally empty — callers must invoke CloseDevice()
// themselves to release the OpenNI context.
OpenNIDevice::~OpenNIDevice() {
}
24
25 /**
26 * デバイスの初期化.
27 */
28 int OpenNIDevice::OpenDevice() {
29 XnStatus rc;
30
31 EnumerationErrors errors;
32 rc = context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
33 if (rc == XN_STATUS_NO_NODE_PRESENT) {
34 XnChar strError[1024];
35 errors.ToString(strError, 1024);
36 printf("%s\n", strError);
37 return (-1);
38 } else if (rc != XN_STATUS_OK) {
39 printf("Open failed: %s\n", xnGetStatusString(rc));
40 return (-1);
41 }
42
43 rc = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
44 rc = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
45 rc = user.Create(context);
46
47 depth.GetMetaData(depthMD);
48 image.GetMetaData(imageMD);
49
50 // Hybrid mode isn't supported in this sample
51 if (imageMD.FullXRes() != depthMD.FullXRes() || imageMD.FullYRes() != depthMD.FullYRes()) {
52 printf ("The device depth and image resolution must be equal!\n");
53 return -1;
54 }
55
56 // RGB is the only image format supported.
57 if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24) {
58 printf("The device image format must be RGB24\n");
59 return -1;
60 }
61
62 depth.GetAlternativeViewPointCap().SetViewPoint(image); // imageとdepthのズレを自動で合わせる
63 context.SetGlobalMirror(FALSE); // FALSEで通常画像 デフォルトでは左右反転画像
64 width = (int)depthMD.FullXRes();
65 height = (int)depthMD.FullYRes();
66
67 XnFieldOfView FOV;
68 depth.GetFieldOfView(FOV);
69 fXToZ = tan(FOV.fHFOV / 2) * 2;
70 fYToZ = tan(FOV.fVFOV / 2) * 2;
71
72 context.StartGeneratingAll();
73
74 return 0;
75 }
76
77 /**
78 * 画面サイズを取得.
79 */
80 int OpenNIDevice::InqSize(int *x, int *y) {
81 *x = width;
82 *y = height;
83 return 0;
84 }
85
86 /**
87 * デバイスの終了処理.
88 */
89 int OpenNIDevice::CloseDevice(void)
90 {
91 context.Shutdown();
92 return 0;
93 };
94
95 /**
96 * 画素のBGRデータ・深度データを出力.
97 * imageBuffer, depthRawBufferに格納される.
98 */
99 void OpenNIDevice::GetData(unsigned char *imageBuffer, unsigned short *depthRawBuffer, unsigned char *userBuffer) {
100 XnStatus rc = XN_STATUS_OK;
101
102 // Read a new frame
103 rc = context.WaitAnyUpdateAll();
104 if (rc != XN_STATUS_OK) {
105 printf("Read failed: %s\n", xnGetStatusString(rc));
106 return;
107 }
108
109 depth.GetMetaData(depthMD);
110 image.GetMetaData(imageMD);
111 user.GetUserPixels(0, sceneMD);
112
113 // image. -------------------------------------------------------------
114 const XnRGB24Pixel* pImageRow = imageMD.RGB24Data();
115 unsigned char* pBuffRow = imageBuffer;
116 for (XnUInt y = 0; y < imageMD.YRes(); ++y) {
117 const XnRGB24Pixel* pImage = pImageRow;
118 unsigned char* pBuff = pBuffRow;
119
120 for (XnUInt x = 0; x < imageMD.XRes(); x++, pImage++, pBuff += 3) {
121 pBuff[0] = pImage->nBlue;
122 pBuff[1] = pImage->nGreen;
123 pBuff[2] = pImage->nRed;
124 }
125
126 pImageRow += imageMD.XRes();
127 pBuffRow += imageMD.XRes() * 3;
128 }
129 // --------------------------------------------------------------------
130
131 // depth. -------------------------------------------------------------
132 xnOSMemSet(depthRawBuffer, 0, sizeof(depthRawBuffer));
133
134 const XnDepthPixel* pDepthRow = depthMD.Data();
135 unsigned short* pDepthBuffRow = depthRawBuffer;
136 for (XnUInt y = 0; y < depthMD.YRes(); ++y) {
137 const XnDepthPixel* pDepth = pDepthRow;
138 unsigned short* pBuff = pDepthBuffRow;
139
140 for (XnUInt x = 0; x < depthMD.XRes(); x++, pDepth++, pBuff++) {
141 if (*pDepth != 0) {
142 *pBuff = *pDepth;
143 }
144 }
145
146 pDepthRow += depthMD.XRes();
147 pDepthBuffRow += depthMD.XRes();
148 }
149 // --------------------------------------------------------------------
150
151 // user. --------------------------------------------------------------
152 const XnLabel *label = sceneMD.Data();
153 unsigned char *pUserBuffRow = userBuffer;
154 for (XnUInt y = 0; y < depthMD.YRes(); y++) {
155 unsigned char* pBuff = pUserBuffRow;
156
157 for (XnUInt x = 0; x < depthMD.XRes(); x++, label++, pBuff++) {
158 *pBuff = (unsigned char)(*label);
159 }
160
161 pUserBuffRow += depthMD.XRes();
162 }
163 // --------------------------------------------------------------------
164 }
165
/**
 * Build an interleaved vertex array from the current frame, in the packed
 * layout expected by OpenGL's GL_C4UB_V3F: per vertex, 4 color bytes
 * (R, G, B, plus a 4th byte written as 0) followed by 3 floats (x, y, z).
 * Pixels with depth 0 (no reading) are skipped entirely.
 *
 * @param imageBuffer    BGR image, 3 bytes/pixel (as filled by GetData)
 * @param depthRawBuffer raw depth per pixel
 * @param userBuffer     user label per pixel (0 = background)
 * @param vertexArray    output; must hold width*height*16 bytes
 * @param mode           1 = tint pixels of tracked users (labels 1..5) with
 *                       the global color table; otherwise use camera color
 * @return the number of vertices written
 */
unsigned int OpenNIDevice::createDrawArrays(unsigned char *imageBuffer, unsigned short *depthRawBuffer, unsigned char *userBuffer, unsigned char *vertexArray, int mode) {

    unsigned char *pImageBuffer = imageBuffer;
    unsigned short *pDepthBuffer = depthRawBuffer;
    unsigned char *pUserBuffer = userBuffer;
    // 16 bytes per vertex: 4 color bytes + 3 floats.
    memset((void *)vertexArray, 0, width * height * ((sizeof(char) * 4 + sizeof(float) * 3)));
    unsigned char *pArray = vertexArray;
    int count = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            // Skip pixels with no depth reading.
            if ((*pDepthBuffer) != 0) {
                int userlabel = (int)(*pUserBuffer);
                if (mode == 1 && userlabel != 0 && userlabel < 6) {
                    // User-detection mode: color by user label (1..5 maps
                    // to colors[0..4]).
                    pArray[0] = colors[userlabel - 1][0];
                    pArray[1] = colors[userlabel - 1][1];
                    pArray[2] = colors[userlabel - 1][2];
                    pArray[3] = 0;
                } else {
                    // Camera color: input buffer is BGR, vertex color is RGB.
                    pArray[0] = pImageBuffer[2]; // R
                    pArray[1] = pImageBuffer[1]; // G
                    pArray[2] = pImageBuffer[0]; // B
                    pArray[3] = 0;
                }

                // Store the 3D position: un-project the pixel through the
                // precomputed tan(FOV/2)*2 factors, scaled by its depth;
                // Z is the raw depth value itself.
                float fNormalizedX = ((float)x / width - 0.5f);
                float fNormalizedY = (0.5f - (float)y / height);
                float tmpX = (float)(fNormalizedX * (float)(*pDepthBuffer) * fXToZ);
                float tmpY = (float)(fNormalizedY * (float)(*pDepthBuffer) * fYToZ);
                float tmpZ = (float)(*pDepthBuffer);

                // Write the three floats right after the 4 color bytes.
                *((float *) (pArray + 4)) = tmpX;
                *((float *) (pArray + 4 + sizeof(float))) = tmpY;
                *((float *) (pArray + 4 + sizeof(float) * 2)) = tmpZ;
                pArray += sizeof(char) * 4 + sizeof(float) * 3;
                count++;
            }
            pImageBuffer += 3;
            pDepthBuffer++;
            pUserBuffer++;
        }
    }
    return count;
}
217
218 /**
219 * Depthカメラ(raw)->ワールド座標(単位mm)
220 * 入力:
221 * x:参照型 float :DepthカメラX座標 0~639
222 * y:参照型 float :DepthカメラY座標 0~479
223 * z:参照型 float :DepthカメラDepth 0~
224 * 出力:
225 * x:参照型 float :ワールド座標X(単位mm)
226 * y:参照型 float :ワールド座標Y(単位mm)
227 * z:参照型 float :ワールド座標Z(単位mm)
228 */
229 void OpenNIDevice::KinectDepthToWorld(float &x, float &y, float &z) {
230 XnPoint3D in[1],out[1];
231
232 in[0].X = x;
233 in[0].Y = y;
234 in[0].Z = z;
235 depth.ConvertProjectiveToRealWorld (1,in,out);
236 x = out[0].X;
237 y =-out[0].Y; //ARToolKitのカメラ座標系は、Y軸が下向き
238 z = out[0].Z;
239 };
240
241 void OpenNIDevice::KinectDepthToWorldArray(int num, XnPoint3D* inArray, XnPoint3D* outArray) {
242 depth.ConvertProjectiveToRealWorld ((XnUInt32)num, inArray, outArray);
243 }
main.cpp <メイン>
1 #include "openniDevice.h"
2 #include "glut.h"
3
// Initial window size in pixels.
#define WINDOW_X 640
#define WINDOW_Y 480

// Rendering modes: NORMAL_MODE colors points from the RGB camera,
// USER_MODE tints pixels belonging to tracked users instead.
enum modeList {
    NORMAL_MODE,
    USER_MODE
} MODE_LIST;  // NOTE(review): this global enum instance appears unused — confirm before removing

int windowX = WINDOW_X;
int windowY = WINDOW_Y;
int mode = NORMAL_MODE;

int xsize, ysize;       // sensor frame size reported by the device
OpenNIDevice device;

// Per-frame buffers for each data stream (assumes 640x480 frames).
static unsigned char gImageBuffer[640 * 480 * 3];   // BGR, 3 bytes/pixel
static unsigned short gDepthRawBuffer[640 * 480];   // raw depth values
static unsigned char gUserBuffer[640 * 480];        // user label per pixel
// Interleaved GL_C4UB_V3F vertex array (16 bytes per potential vertex).
static unsigned char gVertexArray[640 * 480 * (sizeof(char) * 4 + sizeof(float) * 3)];

// Forward declarations.
void glutIdle(void);
void glutDisplay(void);
void glutKeyboard(unsigned char key, int x, int y);
void glutReshape(int w, int h);
void Cleanup(void);
33
/**
 * GLUT idle callback: request a redraw on every idle pass so the
 * point cloud keeps animating.
 */
void glutIdle (void) {
    // Display the frame
    glutPostRedisplay();
}
41
42 /**
43 * ウインドウサイズ変更時の処理.
44 */
45 void glutReshape(int w, int h) {
46 glViewport(0, 0, w, h);
47 windowX = w;
48 windowY = h;
49 }
50
/**
 * GLUT display callback: grab a fresh frame from the device, rebuild the
 * interleaved vertex array, and render it as a slowly rotating point cloud.
 */
void glutDisplay(void) {
    // Fetch the latest image/depth/user-label frames into the global buffers.
    unsigned char *pImageBuffer = gImageBuffer;
    unsigned short *pDepthBuffer = gDepthRawBuffer;
    unsigned char *pUserBuffer = gUserBuffer;
    unsigned char *pVertexArray = gVertexArray;
    device.GetData(pImageBuffer, pDepthBuffer, pUserBuffer);

    // Build the vertex array; arraySize is the number of valid points.
    int arraySize = device.createDrawArrays(pImageBuffer, pDepthBuffer, pUserBuffer, pVertexArray, mode);

    // Prepare for drawing.
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);

    // Auto-rotate: tilt 30 degrees about X, then spin about the vertical
    // axis, advancing one degree per rendered frame.
    static float xrot = 0.0f;
    glRotatef(30.0f, 1.0f, 0.0f, 0.0f);
    glRotatef(xrot, 0.0f, 1.0f, 0.0f);
    xrot += 1.0f;

    // Draw the point cloud.
    glPushMatrix(); {
        // GL_C4UB_V3F matches the packed layout written by createDrawArrays.
        glInterleavedArrays(GL_C4UB_V3F, 0, pVertexArray);
        glDrawArrays(GL_POINTS, 0, arraySize);

    } glPopMatrix();

    // Present the frame (double-buffered).
    glutSwapBuffers();
}
88
89 /**
90 * キーボード.
91 */
92 void glutKeyboard(unsigned char key, int x, int y) {
93 #ifdef _DEBUG // 入力キーを表示
94 printf("[%c] key was pressed\n", key);
95 #endif
96
97 switch (key) {
98 case 0x1b: // ESCキーを入力したらアプリケーション終了
99 Cleanup();
100 exit(0);
101 break;
102 case 'u':
103 mode = (mode++) % 2;
104 if (mode == USER_MODE)
105 printf("user mode ON\n");
106 else
107 printf("user mode OFF\n");
108 break;
109 default:
110 break;
111 }
112 }
113
/**
 * Release the OpenNI device before the process exits.
 */
void Cleanup(void) {
    device.CloseDevice();
}
120
121 /**
122 * メイン関数.
123 */
124 int main(int argc, char **argv) {
125 // OpenNI init
126 if (device.OpenDevice() < 0) exit(0);
127 if (device.InqSize(&xsize, &ysize) < 0) exit(0);
128
129 // OpenGL init
130 glutInit(&argc, argv);
131 glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
132 glutInitWindowSize(windowX, windowY);
133 glutCreateWindow("3D Point Cloud");
134
135 glutKeyboardFunc(glutKeyboard);
136 glutDisplayFunc(glutDisplay);
137 glutIdleFunc(glutIdle);
138
139 glMatrixMode(GL_PROJECTION);
140 glLoadIdentity();
141 gluPerspective(60.0, (double)xsize / (double)ysize, 0.01, 100000.0);
142
143 /// 視点位置
144 gluLookAt(0.0f, 0.0f, 3000.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);
145
146 glutMainLoop();
147 }
座標の変換
XnStatus xn::DepthGenerator::ConvertProjectiveToRealWorld(XnUInt32 nCount, const XnPoint3D *aProjective, XnPoint3D *aRealWorld) const
- 取得した深度rawデータからワールド座標(mm)に変換