/*
 * Implement a microsecond delay on Linux
 * without using a timer.
 *
 * On low-performance architectures,
 * timer precision is not always satisfactory.
 */
#include<signal.h>
#include<errno.h>
#include<stdlib.h>
#include<unistd.h>
#include<time.h>
#include<sys/time.h>
#include<stdio.h>

/*
 * Busy-wait for approximately input_us microseconds.
 *
 * Spins on gettimeofday() instead of sleeping, because on
 * low-performance targets sleep/timer granularity is too coarse.
 *
 * input_us: requested delay in microseconds. Requests above 19 us are
 * shortened by 2 us to compensate for call overhead (empirical fudge
 * from the original author -- TODO confirm on target hardware).
 * Non-positive or already-elapsed deadlines return immediately.
 * Exits the process if gettimeofday() fails.
 */
void delay(long input_us) {
    struct timeval now;

    if (input_us > 19)
        input_us -= 2;  /* compensate for function-call overhead */

    if (gettimeofday(&now, NULL)) {
        perror("gettimeofday()");
        exit(EXIT_FAILURE);
    }

    /*
     * Compute the absolute deadline (s, us).  Carry any overflow of
     * the microsecond field into the seconds field so 0 <= us < 1e6;
     * without this normalization the loop below could exit far too
     * early or too late, since tv_usec never exceeds 999999.
     */
    long s  = now.tv_sec  + input_us / 1000000;
    long us = now.tv_usec + input_us % 1000000;
    if (us >= 1000000) {
        s  += 1;
        us -= 1000000;
    }

    /* Spin until current time reaches the deadline (lexicographic
     * compare on the (sec, usec) pair). */
    while (now.tv_sec < s || (now.tv_sec == s && now.tv_usec < us)) {
        if (gettimeofday(&now, NULL)) {
            perror("gettimeofday()");
            exit(EXIT_FAILURE);
        }
    }
}
