How can I fix this Linux timer problem?

Posted: 2014-02-22 16:57:36

Tags: c linux timer

#include <features.h>
#include <time.h>
#include <sys/time.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <signal.h>
#include <unistd.h>
#include <errno.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>

#include <netinet/in.h>
#include <arpa/inet.h>

typedef unsigned int uint32;

#define million 1000000L

long duration2ms, duration10ms, duration100ms;
double Task2ms_Raster, Task10ms_Raster, Task100ms_Raster;
timer_t firstTimerID, secondTimerID, thirdTimerID;

void TASK1(Task2ms_Raster) {
    struct timespec start, stop;
    int a, b, c;
    uint32 StartTime, StopTime;
    a=1, b=2, c=3;

    if((StartTime = clock_gettime(CLOCK_REALTIME, &start)) == -1) {
        perror("clock gettime");
    }


    a = b + c;
    b = c + a;
    c = a + b;

    b = c + a;
    c = a + b; a = b + c;
    b = c + a;
    c = a + b; a = b + c;
    b = c + a;
    c = a + b; a = b + c;
    b = c + a;
    c = a + b; a = b + c;
    b = c + a;
    c = a + b; a = b + c;
    b = c + a;
    c = a + b; a = b + c;
    b = c + a;
    c = a + b;

// I repeated this block many times.

    printf("ETAS\n");
    printf("ETAS1\n");
    if((StopTime = clock_gettime( CLOCK_REALTIME, &stop)) == -1) {
        perror("clock gettime");
    }
    duration2ms = (stop.tv_sec - start.tv_sec) +
                  (double)(stop.tv_nsec - start.tv_nsec) /
                  (double)million;
    printf("time difference is= %ld\n", duration2ms);
}

void TASK2(Task10ms_Raster) {
    struct timespec start, stop;
    if(clock_gettime( CLOCK_REALTIME, &start) == -1) {
        perror("clock gettime");
    }
    printf("ETAS2\n");
    printf("ETAS3\n");
    if(clock_gettime(CLOCK_REALTIME, &stop) == -1) {
        perror("clock gettime");
    }
    duration10ms = (stop.tv_sec - start.tv_sec) +
                   (double)( stop.tv_nsec - start.tv_nsec) /
                   (double)million;
    printf("time difference is= %ld\n", duration10ms);
}

void TASK3(Task100ms_Raster) {
    struct timespec start, stop;
    if(clock_gettime( CLOCK_REALTIME, &start) == -1) {
        perror("clock gettime");
    }
    printf("ETAS4\n");
    printf("ETAS5\n");
    if((clock_gettime(CLOCK_REALTIME, &stop)) == -1) {
        perror("clock gettime");
    }
    duration100ms = (stop.tv_sec - start.tv_sec) +
                    (double)(stop.tv_nsec - start.tv_nsec) /
                    (double)million;
    printf( "time difference is= %ld\n", duration100ms );
}

static void timerHandler(int sig, siginfo_t *si, void *uc) {
    timer_t *tidp;
    tidp = si->si_value.sival_ptr;
    if (*tidp == firstTimerID)
        TASK1(Task2ms_Raster);
    else if(*tidp == secondTimerID)
        TASK2(Task10ms_Raster);
    else if(*tidp == thirdTimerID)
        TASK3(Task100ms_Raster);
}

static int makeTimer(char *name, 
                     timer_t *timerID, 
                     int expireMS, 
                     int intervalMS) {

    struct sigevent         te;
    struct itimerspec       its;
    struct sigaction        sa;
    int                     sigNo = SIGRTMIN;

    /* Set up signal handler. */
    sa.sa_flags = SA_SIGINFO;
    sa.sa_sigaction = timerHandler;
    sigemptyset(&sa.sa_mask);
    if(sigaction(sigNo, &sa, NULL) == -1) {
        perror("sigaction");
    }

    /* Set and enable alarm */
    te.sigev_notify = SIGEV_SIGNAL;
    te.sigev_signo = sigNo;
    te.sigev_value.sival_ptr = timerID;
    timer_create(CLOCK_REALTIME, &te, timerID);

    its.it_interval.tv_sec = 0;
    its.it_interval.tv_nsec = intervalMS * 1000000;
    its.it_value.tv_sec = 0;
    its.it_value.tv_nsec = expireMS * 1000000;
    timer_settime(*timerID, 0, &its, NULL);

    return 1;
}

int main(void) {
    makeTimer("First Timer", &firstTimerID, 2, 2);   //2ms
    makeTimer("Second Timer", &secondTimerID, 10, 10);    //10ms
    makeTimer("Third Timer", &thirdTimerID, 100, 100);  //100ms
    while(1) {
        sleep(100);
    }
}

I created timers that call a task every 2 ms, 10 ms and 100 ms. Each task just prints some values and records the start and stop times around the printing. When I run the program above, it does not show the time difference between the start and stop times (i.e. duration2ms, duration10ms and duration100ms). Can someone help me?

2 Answers:

Answer 0 (score: 1)

In every case the time difference will show as zero, because you are measuring how long it takes to print two lines, which is very fast. What you presumably want to time is the interval between calls to each task.

If you want to measure the time between task invocations, you need to carry the time over from one call to the next. As an example, here is one of the tasks:

void TASK3(Task100ms_Raster) {
     static struct timespec start, stop = { .tv_sec = -1 }; // static duration!
     if (stop.tv_sec < 0) {
        (void) clock_gettime( CLOCK_REALTIME, &stop); // first time run
     }
     start = stop; // start from previous stop time

     // do whatever here

     (void) clock_gettime( CLOCK_REALTIME, &stop);

     duration100ms = (stop.tv_sec - start.tv_sec)
                     + (double)(stop.tv_nsec - start.tv_nsec)
                     / (double)million;
     printf( "time difference is= %ld\n", duration100ms );
}
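As a side note, duration2ms, duration10ms and duration100ms are declared as long, so the fractional part of the expression above is truncated before it is printed, and %ld does not match a fractional value in any case. Below is a minimal sketch of a helper that returns the elapsed time in milliseconds as a double, assuming milliseconds are the intended unit; the name elapsed_ms is only illustrative and not part of the original code:

/* Hypothetical helper: elapsed time between two timespec values, in milliseconds.
 * Returning a double keeps the sub-millisecond part instead of truncating it. */
static double elapsed_ms(const struct timespec *start, const struct timespec *stop) {
    return (double)(stop->tv_sec - start->tv_sec) * 1000.0
         + (double)(stop->tv_nsec - start->tv_nsec) / 1000000.0;
}

Usage would then look like:

printf("time difference is= %f ms\n", elapsed_ms(&start, &stop));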

Answer 1 (score: 0)

The problem is that you never save the time value anywhere in TASK*(). This means clock_gettime() is called twice in a row with little or no time in between. What you should do instead is something like the following:

#include <features.h>
#include <time.h>
#include <sys/time.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <signal.h>
#include <unistd.h>
#include <errno.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>

#include <netinet/in.h>
#include <arpa/inet.h>

typedef unsigned int uint32;
timer_t tmr;
struct timespec prev;

static void handle_timer(int sig, siginfo_t *si, void *uc) {
  timer_t *tidp;
  struct timespec current;

  tidp = si->si_value.sival_ptr;
  clock_gettime(CLOCK_REALTIME, &current);
  printf("dif between calls to handle_timer: %ld\n",
    current.tv_sec - prev.tv_sec);
  prev = current;
}

int main(int argc, char **argv) {
  struct sigevent se;
  struct itimerspec       its;
  struct sigaction        sa;
  clock_gettime(CLOCK_REALTIME, &prev);

  /* Set up signal handler. */
  sa.sa_flags = SA_SIGINFO;
  sa.sa_sigaction = handle_timer;
  sigemptyset(&sa.sa_mask);
  if (sigaction(SIGRTMIN, &sa, NULL) == -1)
    perror("sigaction");

  /* Set and enable alarm */
  se.sigev_notify = SIGEV_SIGNAL;
  se.sigev_signo = SIGRTMIN;
  se.sigev_value.sival_ptr = &tmr;
  timer_create(CLOCK_REALTIME, &se, &tmr);

  its.it_interval.tv_sec = 1;
  its.it_interval.tv_nsec = 0;   /* initialize the whole interval, not just tv_sec */
  its.it_value.tv_sec = 1;
  its.it_value.tv_nsec = 0;
  timer_settime(tmr, 0, &its, NULL);   /* timer_settime() takes the timer_t itself, not a pointer to it */

  while(1)
    sleep(100);
  return 0;
}

The difference here is that we actually save the time at which handle_timer() was called, and then compute the difference against that saved time.
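To tie this back to the question's 2 ms / 10 ms / 100 ms timers, the same idea can be applied per timer by keeping one saved timestamp for each. The following is only a sketch under that assumption: prev2, prev10 and prev100 are hypothetical globals, and the millisecond conversion is not part of the original answer.

/* Hypothetical per-timer saved timestamps; initialize them with clock_gettime()
 * in main() before the timers are started. */
static struct timespec prev2, prev10, prev100;

static void timerHandler(int sig, siginfo_t *si, void *uc) {
    timer_t *tidp = si->si_value.sival_ptr;
    struct timespec now;
    struct timespec *prevp;

    /* Pick the saved timestamp belonging to the timer that fired. */
    if (*tidp == firstTimerID)
        prevp = &prev2;
    else if (*tidp == secondTimerID)
        prevp = &prev10;
    else
        prevp = &prev100;

    clock_gettime(CLOCK_REALTIME, &now);
    printf("interval: %.3f ms\n",
           (double)(now.tv_sec - prevp->tv_sec) * 1000.0 +
           (double)(now.tv_nsec - prevp->tv_nsec) / 1000000.0);
    *prevp = now;   /* save for the next invocation of this timer */
}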