C++ vs Processing test: weird results
For a game I'm designing, I wanted to test the performance differences between C++, C# and Processing. However, I was stumped when Processing ran over twice as fast as C++ on a simple test I made. Does this make sense??
The Processing timer class code is:
class Testerlegacy
{
  double one;
  double two;
  int counter = 0;
  int calcs;

  public void test(double p)
  {
    println("single core checker to proceeding");
    one = 2;
    two = p;
    calcs = 1000000;
    println("start");
    int init, Final;
    init = millis();
    while (counter < calcs)
    {
      // 20 multiplications per iteration (10 unrolled pairs)
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      counter = counter + 1;
    }
    Final = millis() - init;
    double Final2 = Final;
    println("time taken for all calcs:");
    println(Final2 / 1000);
    println("Calcs per 1/30 seconds:");
    println(calcs * 20); // total multiplications performed
    print(((calcs * 20) / (Final2 / 1000)) / 30);
    print(" - ");
    println(((calcs * 21) / (Final2 / 1000)) / 30);
  }
}
The C++ timer class code is:
class Testerlegacy
{
  double one;
  double two;
  double counter;   // note: counter is never initialized anywhere in this snippet
  string checker;
  int calcs;

public:
  void test(double x)
  {
    cout << "Do you wish for single core checker to proceed? Press n for no." << endl;
    cin >> checker;
    if (checker == "n")
    {
      return;
    }
    one = 2;
    two = x;
    calcs = 1000000;
    cout << "start" << endl; // 300000 calcs assumes cout counter and while loop add up to 1
    clock_t init, final;
    init = clock();
    while (counter < calcs)
    {
      // 20 multiplications per iteration (10 unrolled pairs)
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      one = one * two;
      two = one * one;
      counter = counter + 1;
    }
    final = clock() - init;
    // print the raw clock() ticks and CLOCKS_PER_SEC for checking
    cout << init << endl;
    cout << (double)final << endl << (double)CLOCKS_PER_SEC << endl;
    cout << "time taken for all calcs: " << (double)final / ((double)CLOCKS_PER_SEC) << endl;
    cout << "Calcs per 1/30 seconds: "
         << ((calcs * 20) / ((double)final / (double)CLOCKS_PER_SEC)) / 30
         << " - "
         << ((calcs * 21) / ((double)final / (double)CLOCKS_PER_SEC)) / 30;
    cout << endl;
    cout << "end" << endl;
  }
};
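For reference, here is a minimal, self-contained sketch of the same kind of multiply loop timed with std::chrono::steady_clock instead of clock(). It is not the code I actually ran; the starting value 1.0000001 and the inner loop (instead of the unrolled statements) are just placeholders chosen to mirror the class above:

#include <chrono>
#include <iostream>

int main()
{
    double one = 2.0;
    double two = 1.0000001;   // placeholder; the real test passes this in through test(double x)
    const int calcs = 1000000;

    auto start = std::chrono::steady_clock::now();
    for (int counter = 0; counter < calcs; ++counter)
    {
        // same 20 multiplications per iteration as the unrolled loop above
        for (int k = 0; k < 10; ++k)
        {
            one = one * two;
            two = one * one;
        }
    }
    auto stop = std::chrono::steady_clock::now();

    std::chrono::duration<double> elapsed = stop - start;
    std::cout << "time taken for all calcs: " << elapsed.count() << " s" << std::endl;
    std::cout << "Calcs per 1/30 seconds: "
              << (calcs * 20.0) / elapsed.count() / 30.0 << std::endl;

    // print the values so an optimizing compiler cannot drop the loop entirely
    std::cout << one << " " << two << std::endl;
    return 0;
}

How the C++ side is compiled (for example, a debug build versus something like g++ -O2) would obviously affect numbers like these, so treat the sketch as illustrative only.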
After running these pieces of code multiple times, the C++ version averaged around 160,000 calcs per 1/30 of a second, whereas the Processing code was getting around 1,100,000 calcs per 1/30 of a second.
Sample output for C++:
  Do you wish for single core checker to proceed? Press n for no.
  k
  start
  0
  4.7e+06
  1e+06
  time taken for all calcs: 4.7
  Calcs per 1/30 seconds: 141844 - 148936
  end
Sample output for Processing:
  single core checker to proceeding
  start
  time taken for all calcs:
  0.583
  Calcs per 1/30 seconds:
  20000000
  1143510.5774728418 - 1200686.1063464838
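For what it's worth, the "Calcs per 1/30 seconds" figures in both outputs follow directly from the measured times and the 20 multiplications per loop iteration. This little check is not part of either test; it just redoes the arithmetic from the print statements:

#include <iostream>

int main()
{
    const double ops = 1000000.0 * 20.0;          // 1,000,000 iterations x 20 multiplies each
    std::cout << ops / 4.7 / 30.0 << std::endl;   // from the C++ run above:        ~141,844
    std::cout << ops / 0.583 / 30.0 << std::endl; // from the Processing run above: ~1,143,510
    return 0;
}

So the gap in the reported rates comes entirely from the measured times (4.7 s versus 0.583 s), not from the counting formula.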
Is my computer playing up? Is my code wrong? Is it an unbalanced test? Or does Processing just beat C++?