code check to speed it up

Hi guys

I hope you don't mind helping me try to speed up the execution by suggesting some changes, as I might have coded it inefficiently.

I calculate the time it takes to execute each frame captured by the webcam from while(1) to end and it is about 2 seconds approx.

but when I run the entire thing without stepping through it, it takes far longer than it did when I measured the execution time of a single loop iteration.

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
int main()
{
	Mat image, grayscale;
	int numcorners, horcorner,vercorner;
	Mat icovar;
	Scalar meanmat;

	//covariance for dark combined
	double covar[3][3]={{180.1437, 180.8316, 179.0236},{188.8316,355.5152,238.8029},{179.0236,238.8029,267.9239}};
	meanmat[0]=13.8340;
	meanmat[1]=68.3459;
	meanmat[2]=22.7451;

	Mat covmat(3,3,CV_64F,covar);
	
	Mat mask = Mat::zeros(480, 640, CV_8UC1);	//create matrix same size as image which is 480 by 640 based on the webcam capture
	//intitialize capture
	Vec3b pixel;
	double distance = 200;
	double mdist=0;

	icovar=inversemat(covmat);		//determinant of covariance matrix is zero. SOLVED

	Mat corners;
	printf("Enter number of corners horizontally: ");
    scanf("%d", &horcorner);
	printf("Enter number of corners vertically: ");
    scanf("%d", &vercorner);
	numcorners=horcorner*vercorner;
	namedWindow("original",1);
	
	vector<vector<Point3f>> object_points;
    vector<vector<Point2f>> image_points;

	vector<Point3f> obj;
	vector<Point2f> img;
    
	VideoCapture webcam;
	webcam.open(-1);

	while(1)
	{
		//copy webcam stream to image
		webcam>>image;
		if(!webcam.isOpened())
		{
			cout<<"\nThe Camera is being used by another application, make sure all applications using the camera are closed and try running this program again."<<endl;
			break;
		}
		for(int i = 0; i < image.rows;i++)
		{
			for(int j=0; j<image.cols;j++)	//in case it fails changed it from i=1 to i=0
			{
				pixel= image.at<Vec3b>(i,j);	//prints wierd characters
				mdist=mahadistance(icovar,meanmat,pixel);
				if(mdist<distance)
					mask.at<uchar>(i,j)=255;
				else
					mask.at<uchar>(i,j)=0;
			}
		}
		cvtColor(image,grayscale,CV_BGR2GRAY);
		//goodfeatures(grayimage, output to store corners, quality factor, distance factor)
		goodFeaturesToTrack(grayscale,corners,numcorners,0.1,100);   //good so far 0.1 and 100 also 0.01 and 100 a little ok i chose this
		// Mark these corners on the original image
		cornerSubPix(grayscale, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 30, 0.1));
		if(corners.rows==numcorners)
		{
			for(int i=0;i<corners.rows;i++)
			{
				//draws circle on image, at centre at point, color, thickness, line type, 
				circle(image,corners.at<Point2f>(i),3,CV_RGB(255,0,0),1,8,0);
				obj.push_back(Point3f(float(i/vercorner), float(i%vercorner), 0.0f));		//setting up the units of calibration
				img.push_back(corners.at<Point2f>(i));		
			}
			image_points.push_back(img);
	        object_points.push_back(obj);
			Mat intrinsic = Mat(3, 3, CV_32FC1);
			Mat distCoeffs;
			vector<Mat> rvecs;
			vector<Mat> tvecs;

			intrinsic.ptr<float>(0)[0] = 1;
			intrinsic.ptr<float>(1)[1] = 1;
	
			calibrateCamera(object_points, image_points, image.size(), intrinsic, distCoeffs, rvecs, tvecs);	//unhandled exception error at this point not because of too much points
			imshow("original",image);
			imshow("mask",mask);
		}
		
		img.clear();
		obj.clear();
		image_points.clear();
		object_points.clear();
	}
	webcam.release();
	system("PAUSE");
	return 0;
}


my header file, which the functions are only used once.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
// Mean BGR pixel value of a CV_8UC3 image, each channel rounded to the
// nearest integer (round-half-up). Returns all zeros for an empty image.
Scalar meanpixel(Mat matrx)
{
	Scalar meanpix;	// cv::Scalar default-initialises all four channels to 0
	const int total = matrx.rows * matrx.cols;
	if (total == 0)	// BUG FIX: guard against divide-by-zero on an empty image
		return meanpix;

	for (int i = 0; i < matrx.rows; i++)
	{
		// Row pointer: one bounds computation per row instead of three
		// Mat::at calls per pixel.
		const Vec3b* row = matrx.ptr<Vec3b>(i);
		for (int j = 0; j < matrx.cols; j++)
		{
			meanpix[0] += double(row[j][0]);
			meanpix[1] += double(row[j][1]);
			meanpix[2] += double(row[j][2]);
		}
	}
	// floor(x + 0.5) == round-half-up, preserving the original behaviour.
	meanpix[0] = floor((meanpix[0] / total) + 0.5);
	meanpix[1] = floor((meanpix[1] / total) + 0.5);
	meanpix[2] = floor((meanpix[2] / total) + 0.5);
	return meanpix;
}

// Sample covariance matrix (divisor k-1) of the three colour channels of a
// CV_8UC3 image about the supplied mean. Returns a symmetric 3x3 CV_64F
// matrix; returns all zeros when there are fewer than two pixels.
Mat covarmatrixcal(Mat matrx1, Scalar meanscal)
{
	const int k = matrx1.rows * matrx1.cols;	// number of pixel samples
	double rr = 0, rg = 0, rb = 0, gg = 0, gb = 0, bb = 0;

	for (int i = 0; i < matrx1.rows; i++)
	{
		const Vec3b* row = matrx1.ptr<Vec3b>(i);	// row pointer: faster than Mat::at per channel
		for (int j = 0; j < matrx1.cols; j++)
		{
			// Compute each deviation once instead of up to three times.
			const double d0 = double(row[j][0]) - meanscal[0];
			const double d1 = double(row[j][1]) - meanscal[1];
			const double d2 = double(row[j][2]) - meanscal[2];
			rr += d0 * d0;
			rg += d0 * d1;
			rb += d0 * d2;
			gg += d1 * d1;
			gb += d1 * d2;
			bb += d2 * d2;
		}
	}

	// Build the Mat directly rather than wrapping a stack array and cloning.
	Mat covmatrix = Mat::zeros(3, 3, CV_64F);
	if (k < 2)	// BUG FIX: k == 1 divided by zero; k == 0 was undefined
		return covmatrix;

	const double n = double(k - 1);	// unbiased (sample) divisor
	covmatrix.at<double>(0, 0) = rr / n;
	covmatrix.at<double>(0, 1) = covmatrix.at<double>(1, 0) = rg / n;	// symmetric off-diagonals
	covmatrix.at<double>(0, 2) = covmatrix.at<double>(2, 0) = rb / n;
	covmatrix.at<double>(1, 1) = gg / n;
	covmatrix.at<double>(1, 2) = covmatrix.at<double>(2, 1) = gb / n;
	covmatrix.at<double>(2, 2) = bb / n;
	return covmatrix;
}

// Squared Mahalanobis distance of a BGR pixel from the colour model:
// (x - mean)^T * inv(cov) * (x - mean).
double mahadistance(Mat invcov, Scalar meanvec, Vec3b patternvec)
{
	// Deviation of the pixel from the model mean, one entry per channel.
	double diff[3];
	for (int c = 0; c < 3; c++)
		diff[c] = patternvec[c] - meanvec[c];

	// First form the row vector diff^T * inv(cov)...
	double rowprod[3] = {0.0, 0.0, 0.0};
	for (int k = 0; k < 3; k++)
	{
		for (int c = 0; c < 3; c++)
			rowprod[c] += diff[k] * invcov.at<double>(k, c);
	}

	// ...then dot it with diff to obtain the scalar distance.
	return (rowprod[0] * diff[0]) + (rowprod[1] * diff[1]) + (rowprod[2] * diff[2]);
}

// Inverse of a 3x3 CV_64F matrix via the classical adjoint:
// inv(A) = adj(A) / det(A). Asserts that the matrix is non-singular.
Mat inversemat(Mat matrx)
{
	// Name the nine entries once; avoids the original's transpose-then-minor
	// construction and the repeated Mat::at calls.
	const double a = matrx.at<double>(0, 0), b = matrx.at<double>(0, 1), c = matrx.at<double>(0, 2);
	const double d = matrx.at<double>(1, 0), e = matrx.at<double>(1, 1), f = matrx.at<double>(1, 2);
	const double g = matrx.at<double>(2, 0), h = matrx.at<double>(2, 1), i = matrx.at<double>(2, 2);

	// Expansion along the first row.
	const double determ = a * (e * i - f * h) - b * (d * i - f * g) + c * (d * h - e * g);
	// BUG FIX: a singular matrix previously produced NaN/Inf entries (the
	// "-1.#QNAN" values mentioned in mahadistance); fail loudly instead.
	CV_Assert(determ != 0.0);

	// Write the adjugate/determinant directly into a heap-backed Mat, rather
	// than wrapping a stack array and cloning it before return.
	Mat invmat(3, 3, CV_64F);
	invmat.at<double>(0, 0) =  (e * i - f * h) / determ;
	invmat.at<double>(0, 1) = -(b * i - c * h) / determ;
	invmat.at<double>(0, 2) =  (b * f - c * e) / determ;
	invmat.at<double>(1, 0) = -(d * i - f * g) / determ;
	invmat.at<double>(1, 1) =  (a * i - c * g) / determ;
	invmat.at<double>(1, 2) = -(a * f - c * d) / determ;
	invmat.at<double>(2, 0) =  (d * h - e * g) / determ;
	invmat.at<double>(2, 1) = -(a * h - b * g) / determ;
	invmat.at<double>(2, 2) =  (a * e - b * d) / determ;
	return invmat;	// Mat owns its buffer, so returning it is safe
}


thanks in advance
really appreciate it.
It would be good if you say what are you trying to accomplish.

> I calculate the time it takes to execute each frame (...) and it is about 2 seconds approx.
> but when i run the entire thing without stepping it takes forever
Sorry, I don't follow. How did you measure the execution time?
imshow("original",image); does not spawn any windows
I calculated it by estimating the time it takes to complete executing one frame.

what i am trying to accomplish is faster execution time per frame so i may see some results.

imshow("original", image); shows the original image.
nope, it doesn't
change it to
1
2
cv::Mat image_aux = image;
imshow("original", image_aux);



I mean what's the program supposed to do
The program is supposed to do the following,

capture frame

segment image to separate foreground from background

detect corners and location of corners

calibrate camera using the corners

display

loop

P.S. Regarding the line you suggested changing it to — did I accidentally reuse a name that OpenCV already uses?
thanks
Ok, so i have changed it further by taking some elements out of the loop that is not needed in the loop.

anymore suggestions.

thanks
btw when chaning it to Mat image_aux = image;

won't i have to change the part of the code which draws circle on the original image to image_aux instead of image as i will be showing image_aux.

anymore suggestions?
Sorry, I was wrong
Please read this http://stackoverflow.com/a/6949428

Consider using a profiler to decide where to improve.
Also, how do I test it?
You talk about segmentation and corners — does the object need to be a special shape, size, or color, or be in motion? What exactly?
What's a profiler, and how do I use it to decide where to improve?

the image segmentation part is separating green background from an image with an object in front of a green background, the corner detection finds corners on a mark on a paper.
Topic archived. No new replies allowed.