I wrote this code for a sampling assignment at my university.
#include <sys/time.h>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>

int main(int argc, char **argv) {
    struct timeval tv;
    float t = atoi(argv[1]);      /* sampling period, in seconds */
    float dt = atoi(argv[2]);     /* sampling interval, in milliseconds */
    double time;
    int nsamples = t / dt * 1000; /* number of samples, floored */
    /* storage array for the samples */
    double *samples;
    samples = malloc(nsamples);
    printf("%d\n\n", nsamples);
    int c = 0;                    /* array index */
    double divergence;
    gettimeofday(&tv, NULL);
    time = (double) tv.tv_sec + tv.tv_usec / 1000000.0f;
    samples[c] = time;
    printf("time: %f\n", samples[c]);
    usleep(dt * 1000);
    while (c < nsamples) {
        c++;
        gettimeofday(&tv, NULL);
        time = (double) tv.tv_sec + tv.tv_usec / 1000000.0f;
        samples[c] = time;
        /* divergence between consecutive samples, reported in milliseconds */
        divergence = (samples[c] - samples[c-1]);
        if (c == 9) {
            printf("%f \n \n%f", samples[c-1], samples[c]);
        }
        printf("time: %f\ndivergence: %f ms\n\n", samples[c], divergence * 1000);
        usleep(dt * 1000);
    }
}
This is my output:
time: 1557335682.435666 divergence: 200.127125 ms
time: 1557335682.635813 divergence: 200.146914 ms
time: 1557335682.835952 divergence: 200.139046 ms
time: 1557335683.036075 divergence: 200.123072 ms
time: 1557335683.236192 divergence: -50328976507548121002151598324465532616014103321089770750300716493231241208217866953937760599346823570331739493744117764925654540012842402655523878795775819489233146901362588461216017208320541658368563434403808909221817741213696.000000 ms
time: 1557335683.436400 divergence: 1557335683436.399902 ms
time: 1557335683.636521 divergence: 1557335683636.520752 ms
time: 1557335683.836647 divergence: 1557335683836.646973 ms
Does anyone know what caused the strange output in the fifth calculation? I can't come up with a logical explanation, and I have never run into a similar "bug" before. Is it related to some characteristic of the gettimeofday() function? (A standalone check of that idea is sketched after the note below.)
Note: the inputs were 10 and 200, so nsamples evaluates to 10/200 × 1000 = 50.
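
To test the gettimeofday() suspicion in isolation, here is a minimal sketch (my own addition, not part of the assignment code): it times eight 200 ms sleeps directly, with no sample array involved, so any jump in the printed values would have to come from gettimeofday() or usleep() themselves. The iteration count and the 200 ms delay are arbitrary choices matching the inputs above.

#include <sys/time.h>
#include <stdio.h>
#include <unistd.h>

int main(void) {
    struct timeval a, b;
    for (int i = 0; i < 8; i++) {      /* 8 iterations, chosen arbitrarily */
        gettimeofday(&a, NULL);
        usleep(200 * 1000);            /* sleep 200 ms, matching dt above */
        gettimeofday(&b, NULL);
        /* elapsed time computed from the two timeval structs, in ms */
        double elapsed_ms = (b.tv_sec - a.tv_sec) * 1000.0
                          + (b.tv_usec - a.tv_usec) / 1000.0;
        printf("elapsed: %f ms\n", elapsed_ms);
    }
    return 0;
}

On a typical Linux system this prints values slightly above 200 ms on every iteration, since usleep() sleeps for at least the requested time.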