/***
* \file InversePerspectiveMapping.cc
* \author Mohamed Aly <[email protected]>
* \date 11/29/2006
*/
#include "InversePerspectiveMapping.hh"
#include "CameraInfoOpt.h"
#include <iostream>
#include <math.h>
#include <assert.h>
#include <list>
using namespace std;
#include <cv.h>
#include <highgui.h>
namespace LaneDetector
{
#define VP_PORTION 0.05
/*
We are assuming the world coordinate frame center is at the camera,
the ground plane is at height -h, the X-axis is going right,
the Y-axis is going forward, the Z-axis is going up. The
camera is looking forward with optical axis in direction of
Y-axis, with possible pitch angle (above or below the Y-axis)
and yaw angle (left or right).
The camera coordinates have the same center as the world, but the Xc-axis goes right,
the Yc-axis goes down, and the Zc-axis (optical axis) goes forward. The
uv-plane of the image is such that u is horizontal going right, v is
vertical going down.
The image coordinates uv are such that pixel centers are at half
coordinates, i.e. the first pixel center is (.5,.5), with the top-left
corner of the image (the corner of the first pixel) at (0,0).
*/
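/*
 A sketch of the projection these conventions imply (the pinhole model
 used throughout this file): a point (x, y, -h) on the ground plane in
 world coordinates maps to homogeneous image coordinates as

   [u*w]   [fu*c2 + cx*c1*s2       -fu*s2 + cx*c1*c2       -cx*s1        ] [ x]
   [v*w] = [s2*(-fv*s1 + cy*c1)    c2*(-fv*s1 + cy*c1)     -fv*c1 - cy*s1] [ y]
   [ w ]   [c1*s2                  c1*c2                   -s1           ] [-h]

 with c1 = cos(pitch), s1 = sin(pitch), c2 = cos(yaw), s2 = sin(yaw),
 focal lengths (fu, fv) and optical center (cx, cy). This is exactly the
 3x3 matrix built in mcvTransformGround2Image() below, and
 mcvTransformImage2Ground() applies the corresponding inverse mapping.
*/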
/**
* This function returns the Inverse Perspective Mapping
* of the input image, assuming a flat ground plane, and
* given the camera parameters.
*
* \param inImage the input image
* \param outImage the output image in IPM
* \param ipmInfo the returned IPM info for the transformation
* \param cameraInfo the camera parameters
* \param outPoints indices of points outside the image
*/
void mcvGetIPM(const CvMat* inImage, CvMat* outImage,
IPMInfo *ipmInfo, const CameraInfo *cameraInfo,
list<CvPoint> *outPoints)
{
//check input image types
if (!(CV_ARE_TYPES_EQ(inImage, outImage) &&
(CV_MAT_TYPE(inImage->type)==CV_MAT_TYPE(FLOAT_MAT_TYPE) ||
(CV_MAT_TYPE(inImage->type)==CV_MAT_TYPE(INT_MAT_TYPE)))))
{
cerr << "Unsupported image types in mcvGetIPM";
exit(1);
}
cout<<"COMING TILL HEREEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE";
//get size of input image
FLOAT u, v;
v = inImage->height;
u = inImage->width;
//get the vanishing point
FLOAT_POINT2D vp;
vp = mcvGetVanishingPoint(cameraInfo); //defined later in this file
vp.y = MAX(0, vp.y);
//get extent of the image in the xfyf plane
FLOAT_MAT_ELEM_TYPE eps = ipmInfo->vpPortion * v;
ipmInfo->ipmLeft = MAX(0, ipmInfo->ipmLeft);
ipmInfo->ipmRight = MIN(u-1, ipmInfo->ipmRight);
ipmInfo->ipmTop = MAX(vp.y+eps, ipmInfo->ipmTop);
ipmInfo->ipmBottom = MIN(v-1, ipmInfo->ipmBottom);
FLOAT_MAT_ELEM_TYPE uvLimitsp[] = {vp.x,
ipmInfo->ipmRight, ipmInfo->ipmLeft, vp.x,
ipmInfo->ipmTop, ipmInfo->ipmTop, ipmInfo->ipmTop, ipmInfo->ipmBottom};
CvMat uvLimits = cvMat(2, 4, FLOAT_MAT_TYPE, uvLimitsp);
//get these points on the ground plane
CvMat * xyLimitsp = cvCreateMat(2, 4, FLOAT_MAT_TYPE);
CvMat xyLimits = *xyLimitsp;
mcvTransformImage2Ground(&uvLimits, &xyLimits,cameraInfo);
//get extent on the ground plane
CvMat row1, row2;
cvGetRow(&xyLimits, &row1, 0);
cvGetRow(&xyLimits, &row2, 1);
double xfMax, xfMin, yfMax, yfMin;
cvMinMaxLoc(&row1, (double*)&xfMin, (double*)&xfMax, 0, 0, 0);
cvMinMaxLoc(&row2, (double*)&yfMin, (double*)&yfMax, 0, 0, 0);
INT outRow = outImage->height;
INT outCol = outImage->width;
FLOAT_MAT_ELEM_TYPE stepRow = (yfMax-yfMin)/outRow;
FLOAT_MAT_ELEM_TYPE stepCol = (xfMax-xfMin)/outCol;
//construct the grid to sample
CvMat *xyGrid = cvCreateMat(2, outRow*outCol, FLOAT_MAT_TYPE);
INT i, j;
FLOAT_MAT_ELEM_TYPE x, y;
//fill it with x-y values on the ground plane in world frame
for (i=0, y=yfMax-.5*stepRow; i<outRow; i++, y-=stepRow)
for (j=0, x=xfMin+.5*stepCol; j<outCol; j++, x+=stepCol)
{
CV_MAT_ELEM(*xyGrid, FLOAT_MAT_ELEM_TYPE, 0, i*outCol+j) = x;
CV_MAT_ELEM(*xyGrid, FLOAT_MAT_ELEM_TYPE, 1, i*outCol+j) = y;
}
//get their pixel values in image frame
CvMat *uvGrid = cvCreateMat(2, outRow*outCol, FLOAT_MAT_TYPE);
mcvTransformGround2Image(xyGrid, uvGrid, cameraInfo);
//now loop and find the nearest pixel value for each position
//that's inside the image, otherwise put it zero
FLOAT_MAT_ELEM_TYPE ui, vi;
//get mean of the input image
CvScalar means = cvAvg(inImage);
double mean = means.val[0];
//generic loop to work for both float and int matrix types
#define MCV_GET_IPM(type) \
for (i=0; i<outRow; i++) \
for (j=0; j<outCol; j++) \
{ \
/*get pixel coordinates*/ \
ui = CV_MAT_ELEM(*uvGrid, FLOAT_MAT_ELEM_TYPE, 0, i*outCol+j); \
vi = CV_MAT_ELEM(*uvGrid, FLOAT_MAT_ELEM_TYPE, 1, i*outCol+j); \
/*check if outside the ROI being mapped*/ \
if (ui<ipmInfo->ipmLeft || ui>ipmInfo->ipmRight || \
vi<ipmInfo->ipmTop || vi>ipmInfo->ipmBottom) \
{ \
CV_MAT_ELEM(*outImage, type, i, j) = (type)mean; \
} \
/*not out of bounds, then get nearest neighbor*/ \
else \
{ \
/*Bilinear interpolation*/ \
if (ipmInfo->ipmInterpolation == 0) \
{ \
int x1 = int(ui), x2 = MIN(int(ui)+1, inImage->width-1); \
int y1 = int(vi), y2 = MIN(int(vi)+1, inImage->height-1); \
float x = ui - x1, y = vi - y1; \
float val = CV_MAT_ELEM(*inImage, type, y1, x1) * (1-x) * (1-y) + \
CV_MAT_ELEM(*inImage, type, y1, x2) * x * (1-y) + \
CV_MAT_ELEM(*inImage, type, y2, x1) * (1-x) * y + \
CV_MAT_ELEM(*inImage, type, y2, x2) * x * y; \
CV_MAT_ELEM(*outImage, type, i, j) = (type)val; \
} \
/*nearest-neighbor interpolation*/ \
else \
CV_MAT_ELEM(*outImage, type, i, j) = \
CV_MAT_ELEM(*inImage, type, int(vi+.5), int(ui+.5)); \
} \
if (outPoints && \
(ui<ipmInfo->ipmLeft+10 || ui>ipmInfo->ipmRight-10 || \
vi<ipmInfo->ipmTop || vi>ipmInfo->ipmBottom-2) )\
outPoints->push_back(cvPoint(j, i)); \
}
if (CV_MAT_TYPE(inImage->type)==FLOAT_MAT_TYPE)
{
MCV_GET_IPM(FLOAT_MAT_ELEM_TYPE)
}
else
{
MCV_GET_IPM(INT_MAT_ELEM_TYPE)
}
//return the ipm info
ipmInfo->xLimits[0] = CV_MAT_ELEM(*xyGrid, FLOAT_MAT_ELEM_TYPE, 0, 0);
ipmInfo->xLimits[1] =
CV_MAT_ELEM(*xyGrid, FLOAT_MAT_ELEM_TYPE, 0, (outRow-1)*outCol+outCol-1);
ipmInfo->yLimits[1] = CV_MAT_ELEM(*xyGrid, FLOAT_MAT_ELEM_TYPE, 1, 0);
ipmInfo->yLimits[0] =
CV_MAT_ELEM(*xyGrid, FLOAT_MAT_ELEM_TYPE, 1, (outRow-1)*outCol+outCol-1);
ipmInfo->xScale = 1/stepCol;
ipmInfo->yScale = 1/stepRow;
ipmInfo->width = outCol;
ipmInfo->height = outRow;
//clean
cvReleaseMat(&xyLimitsp);
cvReleaseMat(&xyGrid);
cvReleaseMat(&uvGrid);
}
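/*
 Minimal usage sketch for mcvGetIPM(). This is a hypothetical example:
 the struct members below are the ones this function actually reads, but
 all numeric values and the conf file name are placeholders.

   char confFile[] = "CameraInfo.conf";
   CameraInfo cameraInfo;
   mcvInitCameraInfo(confFile, &cameraInfo);

   IPMInfo ipmInfo;
   ipmInfo.vpPortion = VP_PORTION;   //margin kept below the vanishing point
   ipmInfo.ipmLeft = 0;              //region of interest in the input image
   ipmInfo.ipmRight = cameraInfo.imageWidth - 1;
   ipmInfo.ipmTop = 0;               //clamped to below the vanishing point
   ipmInfo.ipmBottom = cameraInfo.imageHeight - 1;
   ipmInfo.ipmInterpolation = 0;     //0 = bilinear, otherwise nearest-neighbor

   CvMat *in = cvCreateMat(240, 320, FLOAT_MAT_TYPE);   //grayscale input
   CvMat *out = cvCreateMat(240, 320, FLOAT_MAT_TYPE);  //IPM output
   list<CvPoint> outPixels;          //filled with out-of-image pixels
   mcvGetIPM(in, out, &ipmInfo, &cameraInfo, &outPixels);
   cvReleaseMat(&in);
   cvReleaseMat(&out);
*/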
/**
* Transforms points from the image frame (uv-coordinates)
* into the real world frame on the ground plane (z=-height)
*
* \param inPoints input points in the image frame
* \param outPoints output points in the world frame on the ground
* (z=-height)
 * \param cameraInfo the input camera parameters
*
*/
void mcvTransformImage2Ground(const CvMat *inPoints,
CvMat *outPoints, const CameraInfo *cameraInfo)
{
//add two rows to the input points
CvMat *inPoints4 = cvCreateMat(inPoints->rows+2, inPoints->cols,
cvGetElemType(inPoints));
//copy inPoints to first two rows
CvMat inPoints2, inPoints3, inPointsr4, inPointsr3;
cvGetRows(inPoints4, &inPoints2, 0, 2);
cvGetRows(inPoints4, &inPoints3, 0, 3);
cvGetRow(inPoints4, &inPointsr3, 2);
//cout<<"printing inPointsr3";
//for(i=0;i<inPointsr3.rows;i++)
//{
//for(j=0;j<inPointsr3.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPointsr3,float,i,j)<<"\n";
//}
//}
cvGetRow(inPoints4, &inPointsr4, 3);
//cout<<"printing inPointsr4";
//for(i=0;i<inPointsr4.rows;i++)
//{
//for(j=0;j<inPointsr4.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPointsr4,float,i,j)<<"\n";
//}
//}
cvSet(&inPointsr3, cvRealScalar(1));
//cout<<"printing inPointsr3";
//for(i=0;i<inPointsr3.rows;i++)
//{
//for(j=0;j<inPointsr3.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPointsr3,float,i,j)<<"\n";
//}
//}
//cout<<"printing inPoints3";
//for(i=0;i<inPoints3.rows;i++)
//{
//for(j=0;j<inPoints3.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPoints3,float,i,j)<<"\n";
//}
//}
cvCopy(inPoints, &inPoints2);
//cout<<"printing inPoints2";
//for(i=0;i<inPoints2.rows;i++)
//{
//for(j=0;j<inPoints2.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPoints2,float,i,j)<<"\n";
//}
//}
//cout<<"printing inPoints3";
//for(i=0;i<inPoints3.rows;i++)
//{
//for(j=0;j<inPoints3.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPoints3,float,i,j)<<"\n";
//}
//}
//create the transformation matrix
float c1 = cos(cameraInfo->pitch);
float s1 = sin(cameraInfo->pitch);
float c2 = cos(cameraInfo->yaw);
float s2 = sin(cameraInfo->yaw);
float matp[] = {
-cameraInfo->cameraHeight*c2/cameraInfo->focalLength.x,
cameraInfo->cameraHeight*s1*s2/cameraInfo->focalLength.y,
(cameraInfo->cameraHeight*c2*cameraInfo->opticalCenter.x/
cameraInfo->focalLength.x)-
(cameraInfo->cameraHeight *s1*s2* cameraInfo->opticalCenter.y/
cameraInfo->focalLength.y) - cameraInfo->cameraHeight *c1*s2,
cameraInfo->cameraHeight *s2 /cameraInfo->focalLength.x,
cameraInfo->cameraHeight *s1*c2 /cameraInfo->focalLength.y,
(-cameraInfo->cameraHeight *s2* cameraInfo->opticalCenter.x
/cameraInfo->focalLength.x)-(cameraInfo->cameraHeight *s1*c2*
cameraInfo->opticalCenter.y /cameraInfo->focalLength.y) -
cameraInfo->cameraHeight *c1*c2,
0,
cameraInfo->cameraHeight *c1 /cameraInfo->focalLength.y,
(-cameraInfo->cameraHeight *c1* cameraInfo->opticalCenter.y /
cameraInfo->focalLength.y) + cameraInfo->cameraHeight *s1,
0,
-c1 /cameraInfo->focalLength.y,
(c1* cameraInfo->opticalCenter.y /cameraInfo->focalLength.y) - s1,
};
CvMat mat = cvMat(4, 3, CV_32FC1, matp);
//multiply
cvMatMul(&mat, &inPoints3, inPoints4);
//cout<<"printing inPoints3";
//for(i=0;i<inPoints3.rows;i++)
//{
//for(j=0;j<inPoints3.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPoints3,float,i,j)<<"\n";
//}
//}
//cout<<"printing mat";
//for(i=0;i<mat.rows;i++)
// {
//for(j=0;j<mat.cols;j++)
//{
//cout<<CV_MAT_ELEM(mat,float,i,j)<<"\n";
//}
//}
//cout<<"printing inPoints4";
//SHOW_MAT(inPoints4,"****inPoints4");
//cout<<"*****************";
//cout<<"********************"<<inPoints3.rows<<"\n"<<inPoints3.cols<<"/n";
//or(i=0;i<inPoints3.rows;i++)
//{
//for(j=0;j<inPoints3.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPoints3,float,i,j)<<"********";
//}
//}
//divide by last row of inPoints4
for (int i=0; i<inPoints->cols; i++)
{
float div = CV_MAT_ELEM(inPointsr4, float, 0, i);
CV_MAT_ELEM(*inPoints4, float, 0, i) =
CV_MAT_ELEM(*inPoints4, float, 0, i) / div ;
CV_MAT_ELEM(*inPoints4, float, 1, i) =
CV_MAT_ELEM(*inPoints4, float, 1, i) / div;
}
//put back the result into outPoints
cvCopy(&inPoints2, outPoints);
//cout<<"printing inPoints2";
//for(i=0;i<inPoints2.rows;i++)
//{
//for(j=0;j<inPoints2.cols;j++)
//{
//cout<<CV_MAT_ELEM(inPoints2,float,i,j)<<"\n";
//}
//}
//clear
cvReleaseMat(&inPoints4);
}
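/*
 Usage sketch (hypothetical pixel values): project two image points onto
 the ground plane, given a CameraInfo initialized elsewhere. Points are
 stored column-wise with u in row 0 and v in row 1, matching how
 mcvGetIPM() builds uvLimits above.

   FLOAT_MAT_ELEM_TYPE uvp[] = {320, 160,    //u coordinates
                                400, 300};   //v coordinates
   CvMat uv = cvMat(2, 2, FLOAT_MAT_TYPE, uvp);
   CvMat *xy = cvCreateMat(2, 2, FLOAT_MAT_TYPE);
   mcvTransformImage2Ground(&uv, xy, &cameraInfo);
   //row 0 of xy holds x (right), row 1 holds y (forward), in the same
   //units as cameraInfo.cameraHeight
   cvReleaseMat(&xy);
*/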
/**
* Transforms points from the ground plane (z=-h) in the world frame
* into points on the image in image frame (uv-coordinates)
*
* \param inPoints 2xN array of input points on the ground in world coordinates
 * \param outPoints 2xN output points on the image in image coordinates
* \param cameraInfo the camera parameters
*
*/
void mcvTransformGround2Image(const CvMat *inPoints,
CvMat *outPoints, const CameraInfo *cameraInfo)
{
//add one row to the input points
CvMat *inPoints3 = cvCreateMat(inPoints->rows+1, inPoints->cols,
cvGetElemType(inPoints));
//copy inPoints to first two rows
CvMat inPoints2, inPointsr3;
cvGetRows(inPoints3, &inPoints2, 0, 2);
cvGetRow(inPoints3, &inPointsr3, 2);
cvSet(&inPointsr3, cvRealScalar(-cameraInfo->cameraHeight));
cvCopy(inPoints, &inPoints2);
//create the transformation matrix
float c1 = cos(cameraInfo->pitch);
float s1 = sin(cameraInfo->pitch);
float c2 = cos(cameraInfo->yaw);
float s2 = sin(cameraInfo->yaw);
float matp[] = {
cameraInfo->focalLength.x * c2 + c1*s2* cameraInfo->opticalCenter.x,
-cameraInfo->focalLength.x * s2 + c1*c2* cameraInfo->opticalCenter.x,
- s1 * cameraInfo->opticalCenter.x,
s2 * (-cameraInfo->focalLength.y * s1 + c1* cameraInfo->opticalCenter.y),
c2 * (-cameraInfo->focalLength.y * s1 + c1* cameraInfo->opticalCenter.y),
-cameraInfo->focalLength.y * c1 - s1* cameraInfo->opticalCenter.y,
c1*s2,
c1*c2,
-s1
};
CvMat mat = cvMat(3, 3, CV_32FC1, matp);
//multiply
cvMatMul(&mat, inPoints3, inPoints3);
//divide by last row of inPoints3
for (int i=0; i<inPoints->cols; i++)
{
float div = CV_MAT_ELEM(inPointsr3, float, 0, i);
CV_MAT_ELEM(*inPoints3, float, 0, i) =
CV_MAT_ELEM(*inPoints3, float, 0, i) / div ;
CV_MAT_ELEM(*inPoints3, float, 1, i) =
CV_MAT_ELEM(*inPoints3, float, 1, i) / div;
}
//put back the result into outPoints
cvCopy(&inPoints2, outPoints);
//cout<<"printing outPoints2";
//SHOW_MAT(outPoints,"outPoints");
//clear
cvReleaseMat(&inPoints3);
}
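/*
 Sanity-check sketch: this function is the inverse of
 mcvTransformImage2Ground() (up to floating-point error), so a round
 trip should reproduce the original pixel coordinates. In-place calls
 are safe, as mcvTransformImIPM2Im() below relies on:

   mcvTransformImage2Ground(&uv, xy, &cameraInfo); //image -> ground
   mcvTransformGround2Image(xy, xy, &cameraInfo);  //ground -> image
   //xy is now approximately equal to uv again
*/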
/**
* Computes the vanishing point in the image plane uv. It is
* the point of intersection of the image plane with the line
 * in the XY-plane of the world coordinates that makes an
 * angle yaw (clockwise from the Y-axis) with the Y-axis
 *
 * \param cameraInfo the input camera parameters
*
* \return the computed vanishing point in image frame
*
*/
FLOAT_POINT2D mcvGetVanishingPoint(const CameraInfo *cameraInfo)
{
//get the vp in world coordinates
FLOAT_MAT_ELEM_TYPE vpp[] = {sin(cameraInfo->yaw)/cos(cameraInfo->pitch),
cos(cameraInfo->yaw)/cos(cameraInfo->pitch), 0};
CvMat vp = cvMat(3, 1, FLOAT_MAT_TYPE, vpp);
//transform from world to camera coordinates
//
//rotation matrix for yaw
FLOAT_MAT_ELEM_TYPE tyawp[] = {cos(cameraInfo->yaw), -sin(cameraInfo->yaw), 0,
sin(cameraInfo->yaw), cos(cameraInfo->yaw), 0,
0, 0, 1};
CvMat tyaw = cvMat(3, 3, FLOAT_MAT_TYPE, tyawp);
//rotation matrix for pitch
FLOAT_MAT_ELEM_TYPE tpitchp[] = {1, 0, 0,
0, -sin(cameraInfo->pitch), -cos(cameraInfo->pitch),
0, cos(cameraInfo->pitch), -sin(cameraInfo->pitch)};
CvMat transform = cvMat(3, 3, FLOAT_MAT_TYPE, tpitchp);
//combined transform
cvMatMul(&transform, &tyaw, &transform);
//
//transformation from (xc, yc) in camera coordinates
// to (u,v) in image frame
//
//matrix to shift optical center and focal length
FLOAT_MAT_ELEM_TYPE t1p[] = {
cameraInfo->focalLength.x, 0,
cameraInfo->opticalCenter.x,
0, cameraInfo->focalLength.y,
cameraInfo->opticalCenter.y,
0, 0, 1};
CvMat t1 = cvMat(3, 3, FLOAT_MAT_TYPE, t1p);
//combine transform
cvMatMul(&t1, &transform, &transform);
//transform
cvMatMul(&transform, &vp, &vp);
//
//clean and return
//
FLOAT_POINT2D ret;
ret.x = cvGetReal1D(&vp, 0);
ret.y = cvGetReal1D(&vp, 1);
return ret;
}
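/*
 Usage sketch: the vanishing point bounds the useful part of the image;
 callers such as mcvGetIPM() above clamp their region of interest just
 below it, since pixels at or above the horizon map to infinitely far
 (or invalid) ground points:

   FLOAT_POINT2D vp = mcvGetVanishingPoint(&cameraInfo);
   vp.y = MAX(0, vp.y);
   //use only image rows below vp.y + vpPortion*imageHeight
*/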
/**
* Converts a point from IPM pixel coordinates into world coordinates
*
* \param point in/out point
* \param ipmInfo the ipm info from mcvGetIPM
*
*/
void mcvPointImIPM2World(FLOAT_POINT2D *point, const IPMInfo *ipmInfo)
{
//x-direction
point->x /= ipmInfo->xScale;
point->x += ipmInfo->xLimits[0];
//y-direction
point->y /= ipmInfo->yScale;
point->y = ipmInfo->yLimits[1] - point->y;
}
/**
* Converts from IPM pixel coordinates into world coordinates
*
* \param inMat input matrix 2xN
* \param outMat output matrix 2xN
* \param ipmInfo the ipm info from mcvGetIPM
*
*/
void mcvTransformImIPM2Ground(const CvMat *inMat, CvMat* outMat, const IPMInfo *ipmInfo)
{
CvMat *mat;
mat = outMat;
if(inMat != mat)
{
cvCopy(inMat, mat);
}
//work on the x-direction i.e. first row
CvMat row;
cvGetRow(mat, &row, 0);
cvConvertScale(&row, &row, 1./ipmInfo->xScale, ipmInfo->xLimits[0]);
//work on y-direction
cvGetRow(mat, &row, 1);
cvConvertScale(&row, &row, -1./ipmInfo->yScale, ipmInfo->yLimits[1]);
}
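/*
 The mapping applied above is affine in each row, derived from the
 scales and limits stored by mcvGetIPM():

   x_ground = x_ipm / xScale + xLimits[0]
   y_ground = yLimits[1] - y_ipm / yScale

 The y-axis is flipped because IPM row indices grow downward while the
 world y-axis points forward (away from the camera).
*/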
/**
* Converts from IPM pixel coordinates into Image coordinates
*
* \param inMat input matrix 2xN
* \param outMat output matrix 2xN
* \param ipmInfo the ipm info from mcvGetIPM
* \param cameraInfo the camera info
*
*/
void mcvTransformImIPM2Im(const CvMat *inMat, CvMat* outMat, const IPMInfo *ipmInfo,
const CameraInfo *cameraInfo)
{
//convert to world coordinates
mcvTransformImIPM2Ground(inMat, outMat, ipmInfo);
//convert to image coordinates
mcvTransformGround2Image(outMat, outMat, cameraInfo);
}
/**
* Initializes the cameraInfo structure with data read from the conf file
*
* \param fileName the input camera conf file name
 * \param cameraInfo the returned camera parameters struct
*
*/
void mcvInitCameraInfo (char * const fileName, CameraInfo *cameraInfo) //CameraInfo is defined in InversePerspectiveMapping.hh
{
//parsed camera data
CameraInfoParserInfo camInfo; //CameraInfoParserInfo is defined in CameraInfoOpt.h
//read the data (keep the parser call outside assert() so it is not
//compiled out in NDEBUG builds)
int rc = cameraInfoParser_configfile(fileName, &camInfo, 0, 1, 1);
assert(rc==0);
(void)rc; //avoid an unused-variable warning when assert is disabled
//initialize the structure
cameraInfo->focalLength.x = camInfo.focalLengthX_arg;
cameraInfo->focalLength.y = camInfo.focalLengthY_arg;
cameraInfo->opticalCenter.x = camInfo.opticalCenterX_arg;
cameraInfo->opticalCenter.y = camInfo.opticalCenterY_arg;
cameraInfo->cameraHeight = camInfo.cameraHeight_arg;
cameraInfo->pitch = camInfo.pitch_arg * CV_PI/180;
cameraInfo->yaw = camInfo.yaw_arg * CV_PI/180;
cameraInfo->imageWidth = camInfo.imageWidth_arg;
cameraInfo->imageHeight = camInfo.imageHeight_arg;
}
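/*
 Hypothetical example of a camera conf file. The key names mirror the
 *_arg fields read above, but the exact syntax is defined by the
 gengetopt-generated parser in CameraInfoOpt.h, and all values here are
 placeholders. Pitch and yaw are given in degrees (converted to radians
 above); cameraHeight is in the same units as the world coordinates:

   focalLengthX = 309.4
   focalLengthY = 344.0
   opticalCenterX = 317.9
   opticalCenterY = 256.5
   cameraHeight = 2179.8
   pitch = 14.0
   yaw = 0.0
   imageWidth = 640
   imageHeight = 480
*/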
/**
* Scales the cameraInfo according to the input image size
*
* \param cameraInfo the input/return structure
* \param size the input image size
*
*/
void mcvScaleCameraInfo (CameraInfo *cameraInfo, CvSize size)
{
//compute the scale factor
double scaleX = size.width/cameraInfo->imageWidth;
double scaleY = size.height/cameraInfo->imageHeight;
//scale
cameraInfo->imageWidth = size.width;
cameraInfo->imageHeight = size.height;
cameraInfo->focalLength.x *= scaleX;
cameraInfo->focalLength.y *= scaleY;
cameraInfo->opticalCenter.x *= scaleX;
cameraInfo->opticalCenter.y *= scaleY;
}
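/*
 Worked example: a calibration done at 640x480 used on a 320x240 stream
 gives scaleX = scaleY = 0.5, so the focal lengths and optical center
 are halved, while pitch, yaw and cameraHeight are left untouched since
 they do not depend on pixel size.
*/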
/**
* Gets the extent of the image on the ground plane given the camera parameters
*
* \param cameraInfo the input camera info
* \param ipmInfo the IPM info containing the extent on ground plane:
* xLimits & yLimits only are changed
*
*/
void mcvGetIPMExtent(const CameraInfo *cameraInfo, IPMInfo *ipmInfo )
{
//get size of input image
FLOAT u, v;
v = cameraInfo->imageHeight;
u = cameraInfo->imageWidth;
//get the vanishing point
FLOAT_POINT2D vp;
vp = mcvGetVanishingPoint(cameraInfo);
vp.y = MAX(0, vp.y);
//get extent of the image in the xfyf plane
FLOAT_MAT_ELEM_TYPE eps = VP_PORTION*v;
FLOAT_MAT_ELEM_TYPE uvLimitsp[] = {vp.x, u, 0, vp.x,
vp.y+eps, vp.y+eps, vp.y+eps, v};
CvMat uvLimits = cvMat(2, 4, FLOAT_MAT_TYPE, uvLimitsp);
//get these points on the ground plane
CvMat * xyLimitsp = cvCreateMat(2, 4, FLOAT_MAT_TYPE);
CvMat xyLimits = *xyLimitsp;
mcvTransformImage2Ground(&uvLimits, &xyLimits,cameraInfo);
//get extent on the ground plane
CvMat row1, row2;
cvGetRow(&xyLimits, &row1, 0);
cvGetRow(&xyLimits, &row2, 1);
double xfMax, xfMin, yfMax, yfMin;
cvMinMaxLoc(&row1, (double*)&xfMin, (double*)&xfMax, 0, 0, 0);
cvMinMaxLoc(&row2, (double*)&yfMin, (double*)&yfMax, 0, 0, 0);
//return
ipmInfo->xLimits[0] = xfMin;
ipmInfo->xLimits[1] = xfMax;
ipmInfo->yLimits[1] = yfMax;
ipmInfo->yLimits[0] = yfMin;
}
} // namespace LaneDetector