The following code times two setter calls with clock(): one setter checks a pointer for null before assigning, the other does not. Compiled with the -O3 flag, the program prints the same time for both:
0.000000 ticks (0.000000 secs)
0.000000 ticks (0.000000 secs)
With the -O0 flag, the difference between the two is still negligible:
4250000.000000 ticks (4.250000 secs)
4230000.000000 ticks (4.230000 secs)
Is the compiler optimizing the work away inside the for loops? If so, how should I benchmark the "real-world" difference in efficiency between the two setters?
#include <cstdio>
#include <ctime>

class B {};

class A
{
public:
    void set( int a ) { a_ = a; }           // unconditional assignment
    int  get() const  { return a_; }
private:
    int a_;
};

class A1
{
public:
    A1() : b_(0) {}
    void set( int a ) { if( b_ ) a_ = a; }  // assigns only when b_ is non-null
    int  get() const  { return a_; }
private:
    int a_;
    B*  b_;
};

int main()
{
    const int n = 1000000000;
    clock_t t0, t1;
    A  a;
    A1 a1;

    // time n calls to the unchecked setter
    t0 = clock();
    for( int i = 0; i < n; ++i )
        a.set( i );
    t1 = clock();
    printf( "%f ticks (%.6f secs)\n", (double)( t1 - t0 ), (double)( t1 - t0 ) / CLOCKS_PER_SEC );

    // time n calls to the setter with the null-pointer check
    t0 = clock();
    for( int i = 0; i < n; ++i )
        a1.set( i );
    t1 = clock();
    printf( "%f ticks (%.6f secs)\n", (double)( t1 - t0 ), (double)( t1 - t0 ) / CLOCKS_PER_SEC );

    return 0;
}
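
The only fix I could come up with is to make each iteration's result observable, e.g. by assigning get() into a volatile variable so the stores cannot be treated as dead. Below is a minimal sketch of that idea (the volatile sink, the a_ initialization in A1, and the per-iteration get() calls are my additions, not part of the program above); even then the setters may be inlined and simplified, so I'm not sure it measures the "real" cost:

#include <cstdio>
#include <ctime>

class B {};

class A
{
public:
    void set( int a ) { a_ = a; }
    int  get() const  { return a_; }
private:
    int a_;
};

class A1
{
public:
    A1() : a_(0), b_(0) {}                  // a_ initialized so get() is well-defined
    void set( int a ) { if( b_ ) a_ = a; }
    int  get() const  { return a_; }
private:
    int a_;
    B*  b_;
};

int main()
{
    const int n = 1000000000;
    volatile int sink = 0;                  // volatile: every write to sink must really happen
    clock_t t0, t1;
    A  a;
    A1 a1;

    t0 = clock();
    for( int i = 0; i < n; ++i )
    {
        a.set( i );
        sink = a.get();                     // makes each store observable
    }
    t1 = clock();
    printf( "%f ticks (%.6f secs)\n", (double)( t1 - t0 ), (double)( t1 - t0 ) / CLOCKS_PER_SEC );

    t0 = clock();
    for( int i = 0; i < n; ++i )
    {
        a1.set( i );
        sink = a1.get();
    }
    t1 = clock();
    printf( "%f ticks (%.6f secs)\n", (double)( t1 - t0 ), (double)( t1 - t0 ) / CLOCKS_PER_SEC );

    return 0;
}

Is something like this the right direction, or is there a more standard way to stop the optimizer from eliding the loops?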