// $Author$
// $Date$
// $Revision$

#include <exception>
#include <fstream>
#include <iostream>
#include <map>
#include <string>
#include <vector>

#include <boost/date_time/posix_time/posix_time.hpp>
//#include <boost/date_time/gregorian/gregorian.hpp>
#include <boost/tokenizer.hpp>
#include <boost/lexical_cast.hpp>

#include <quigon.h>

using namespace std;
using namespace boost::posix_time;
//using namespace boost::gregorian;
using namespace boost;
using boost::lexical_cast;
using namespace quigon;

// Zero-based field indices into one logical record after its two physical
// lines are concatenated and split on commas (see main's tokenizing loop).
const size_t DATE_I = 1;      // date field, formatted MM/DD/YYYY (reassembled as YYYY-MM-DD below)
const size_t TIME_I = 2;      // time-of-day field, appended verbatim after the date
const size_t STATION_I = 4;   // station name; also used as the output-file key prefix
const size_t AOD500_I = 52;   // presumably aerosol optical depth at 500 nm — confirm against file format
const size_t AOD675_I = 53;   // presumably aerosol optical depth at 675 nm — confirm against file format

int main ( int argc, char *argv[] )
{
    // Parse an AOD data file (argv[1]) in which each record spans two
    // physical lines, extract per-station AOD values at two wavelengths,
    // and write one quigon::Time_Serie output file per station/wavelength.
    // Returns 0 on success, 1 on usage or file-open failure.
    if ( argc < 2 )
    {
        cout << "Usage: AOD_data_parser AOD_FILE" << endl;
        return 1;
    }

    ifstream input_file ( argv[1] );
    if ( ! input_file )
    {
        cout << "Can't open file : " << argv[1] << " to read. Aborting... " << endl;
        return 1;
    }

    cout << "Preparing" << endl;

    // Keyed by station name + wavelength suffix ("500" / "675"); each value
    // accumulates (timestamp, AOD) samples and is saved under the key's name.
    map<string, Time_Serie> station;

    string templine1;
    string templine2;
    string total_line;

    char_separator<char> sep_comma ( "," );
    char_separator<char> sep_slash ( "/" );

    cout << "Reading..." << endl;
    // Skip the three header/comment lines at the top of the file.
    getline ( input_file, templine1 );
    getline ( input_file, templine1 );
    getline ( input_file, templine2 );

    // Each record spans two physical lines. Check BOTH reads succeed: the
    // original comma-operator condition discarded the first getline's result,
    // so a truncated trailing record would reuse a stale templine1.
    while ( getline ( input_file, templine1 ) && getline ( input_file, templine2 ) )
    {
        total_line = templine1 + templine2;

        tokenizer<char_separator<char> > line_token ( total_line, sep_comma );
        vector<string> line ( line_token.begin(), line_token.end() );

        // Guard against short/truncated records: the original indexed
        // line[AOD675_I] unconditionally — undefined behaviour when the
        // record has fewer than 54 fields.
        if ( line.size() <= AOD675_I )
        {
            cout << "Skipping short record (" << line.size() << " fields)" << endl;
            continue;
        }

        // Per-record error handling: a single malformed field (e.g. a
        // lexical_cast failure) skips this record instead of aborting the
        // whole parse as the original whole-loop try block did.
        try
        {
            string station_name = line[STATION_I];
            string date_str_old = line[DATE_I];
            string time_str = line[TIME_I];

            double aod500 = lexical_cast<double> ( line[AOD500_I] );
            double aod675 = lexical_cast<double> ( line[AOD675_I] );

            cout << "Find entry: " << station_name << " " << date_str_old << " " << time_str << " " << aod500 << " " << aod675 << endl;

            // Re-order MM/DD/YYYY into the "YYYY-MM-DD HH:MM:SS" form that
            // boost's time_from_string() accepts.
            tokenizer<char_separator<char> > date_token ( date_str_old, sep_slash );
            vector<string> date_str_vector ( date_token.begin(), date_token.end() );
            if ( date_str_vector.size() < 3 )
            {
                cout << "Skipping record with malformed date: " << date_str_old << endl;
                continue;
            }

            string date_str = date_str_vector[2] + "-" + date_str_vector[0] + "-" + date_str_vector[1];
            string date_time = date_str + " " + time_str;

            station[station_name + "500"].add_entry ( time_from_string ( date_time ), aod500 );
            station[station_name + "675"].add_entry ( time_from_string ( date_time ), aod675 );
        }
        catch ( const std::exception & e )
        {
            // Report why the record failed instead of swallowing it blindly.
            cout << "Error encountered, skipping record: " << e.what() << endl;
        }
    }

    cout << "Outputing... " << endl;
    // One output file per (station, wavelength) series, named by the map key.
    for ( map<string, Time_Serie>::iterator iter = station.begin(); iter != station.end(); ++iter )
    {
        string output_filename = iter->first;
        ( iter->second ).save ( output_filename );
    }

    return 0;
}
