""" This file contains utility functions """
import os
import sys
import numpy as np
import gdal
import tarfile
import gzip
from gdalconst import *
import math
import datetime
from constants import __JULIAN_MOCK__
from datetime import datetime as dt
from constants import __NORTH__
from constants import __SOUTH__ 
from constants import __EAST__ 
from constants import __WEST__ 
from constants import __HOME_DIR_DATA__
from constants import __HOME_DIR_RESAMPLE_DATA__

################## begin for date/time process ########################################
# get datetime in log format
def __get_log_time_format__():
        t = datetime.datetime.now().replace(tzinfo=None)
        in_time = t.strftime('%Y-%m-%d %H:%M:%S')
        return str(in_time)

################## end for date/time process ########################################

################## begin for download process ########################################
#------------------- extract and aggregate h5 files ---------------------------------#
# extract all data files
def __extract_tar_file__(flog, directory):
        tarfiles = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.tar')]
        for f in tarfiles:
                try:
                        ar = tarfile.open(f)
                        ar.extractall(directory)
                        ar.close()
                        flog.write("[{0}] successfully extracted {1}\n".format(__get_log_time_format__(), f))
                except tarfile.TarError as e:
                        flog.write("[{0}] Error while extracting {1}: {2}\n".format(__get_log_time_format__(), f, str(e)))
        return 1

# extract all gzip files
def __extract_gz_file__(flog, directory):
        """Decompress every .gz file in the current working directory in place.

        Each archive is written to its own name minus the '.gz' suffix and then deleted.
        flog: open, writable log file. Returns 1.
        """
        gzfiles = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.gz')]
        for f in gzfiles:
                # with-blocks close both handles deterministically (the output
                # handle was previously left open for the garbage collector)
                with gzip.open(f, 'rb') as gzif:
                        s = gzif.read()
                fname = f[:-3]
                # write in binary mode: the payload is raw bytes; text mode 'w'
                # would mangle it on Windows and fails outright on Python 3
                with open(fname, 'wb') as out:
                        out.write(s)
                os.remove(f)
        flog.write("[{0}] successfully extracted all gzip files\n".format(__get_log_time_format__()))
        return 1

# compare file with file array (for use in __remove_unused_files__())
def __compare_file_array__(filename, array):
        for f in array:
                if filename == f:
                        return 1
        return 0

# remove all files that are out of favorite areas
def __remove_unused_files__(flog, directory, ref_file):
        """Delete every .h5 file in the current working directory not listed in *ref_file*.

        ref_file: text file holding one wanted filename per line.
        flog: open, writable log file. Returns 1.
        """
        files = []
        # with-block closes the reference list even if reading fails
        with open(ref_file, "r") as ref:
                for line in ref:
                        # rstrip('\n') instead of line[:-1]: the old slice chopped the
                        # final character of a last line that lacked a trailing newline
                        files.append(line.rstrip('\n'))
        h5files = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.h5')]
        for f in h5files:
                if __compare_file_array__(f, files) == 0:
                        os.remove(f)
        flog.write("[{0}] successfully remove all unused files\n".format(__get_log_time_format__()))
        return 1

# check bounding 
def __check_match_image__(coor_check_La, coor_check_Lo, north, south, east, west):
	if coor_check_Lo >= west and coor_check_Lo <= east:
		if coor_check_La >= south and coor_check_La <= north:
			return 1
	return 0

# remove precisely all file out of favorite area
def __remove_precise__(flog, directory):
	"""Delete GAERO granules (plus their time-matched VAOOO files) whose bounding
	box does not overlap the configured favorite area (__NORTH__/__SOUTH__/__EAST__/__WEST__).

	flog: open, writable log file.
	directory: unused here; files are taken from the current working directory.
	"""
	h5files = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.h5') and f.lower().startswith("gaero")]
	for f in h5files:
		# GDAL HDF5 subdataset paths for the granule's geolocation arrays
		lad = "HDF5:" + f + "://All_Data/VIIRS-Aeros-EDR-GEO_All/Latitude"
		lod = "HDF5:" + f + "://All_Data/VIIRS-Aeros-EDR-GEO_All/Longitude"
		# coors = [max lat, min lat, max lon, min lon]
		coors = __get_bounding_coor__(lad, lod)
		check = [0] * 8
		# checks 0-3: is any corner of the granule inside the favorite area?
		check[0] = __check_match_image__(coors[0], coors[3], __NORTH__, __SOUTH__, __EAST__, __WEST__)
		check[1] = __check_match_image__(coors[0], coors[2], __NORTH__, __SOUTH__, __EAST__, __WEST__)
		check[2] = __check_match_image__(coors[1], coors[3], __NORTH__, __SOUTH__, __EAST__, __WEST__)
		check[3] = __check_match_image__(coors[1], coors[2], __NORTH__, __SOUTH__, __EAST__, __WEST__)
		# checks 4-7: is any corner of the favorite area inside the granule?
		check[4] = __check_match_image__(__NORTH__, __EAST__, coors[0], coors[1], coors[2], coors[3])
		check[5] = __check_match_image__(__NORTH__, __WEST__, coors[0], coors[1], coors[2], coors[3])
		check[6] = __check_match_image__(__SOUTH__, __EAST__, coors[0], coors[1], coors[2], coors[3])
		check[7] = __check_match_image__(__SOUTH__, __WEST__, coors[0], coors[1], coors[2], coors[3])

		if not any(check):
			# no overlap: drop the granule and, if present, its matching VAOOO file
			# (f[21:25]/f[30:34] presumably encode start/end times - TODO confirm naming scheme)
			os.remove(f)
			vaooo = __get_vaooo_fname__(f[21:25], f[30:34])
			# guard: __get_vaooo_fname__ returns "" when nothing matches, and the
			# original unconditional os.remove("") would have raised OSError
			if vaooo != "":
				os.remove(vaooo)
	flog.write("[{0}] successfully remove all out of range files, end processing here\n".format(__get_log_time_format__()))

# get file from start time and end time
def __get_vaooo_fname__(start_time, end_time):
        h5files = [f for f in os.listdir('.') if os.path.isfile(f) and f.endswith('.h5') and f.lower().startswith("vaooo")]
        for f in h5files:
                stime = f[21:25]
                etime = f[30:34]
                if stime == start_time and etime == end_time:
                        return f
        return ""

# aggregate gaero and vaooo files into one
def __aggregate_gaero_vaooo__(fgaero, fvaooo):
        """Merge a GAERO (geolocation) file and its VAOOO (aerosol) file into one
        HDF5 named 'GAERO-<vaooo name>' using the external h5copy tool.

        Returns 1 (individual h5copy exit codes are not checked, as before).
        """
        import subprocess
        faggre = "GAERO-" + fvaooo
        # (source file, HDF5 group copied verbatim to the same path in the output)
        copies = [
                (fvaooo, "//All_Data/VIIRS-Aeros-EDR_All"),
                (fgaero, "//All_Data/VIIRS-Aeros-EDR-GEO_All"),
                (fvaooo, "//Data_Products/VIIRS-Aeros-EDR"),
                (fgaero, "//Data_Products/VIIRS-Aeros-EDR-GEO"),
        ]
        for src, group in copies:
                # argument list instead of an os.system() string: filenames with
                # spaces or shell metacharacters can no longer break the command
                subprocess.call(["h5copy", "-p", "-i", src, "-o", faggre, "-s", group, "-d", group])
        return 1

# aggregate all files in directory
def __aggregate__(flog, directory):
        """Merge every GAERO granule in *directory* with its time-matched VAOOO file,
        then delete the two source files.

        flog: open, writable log file.
        """
        # NOTE(review): isfile checks are CWD-relative while listdir uses *directory*;
        # this only works when directory == CWD - confirm against callers
        h5files = [f for f in os.listdir(directory) if os.path.isfile(f) and f.endswith('.h5') and f.lower().startswith("gaero")]
        for gaero in h5files:
                vaooo = __get_vaooo_fname__(gaero[21:25], gaero[30:34])
                # skip unmatched granules: the original still called the aggregator
                # with an empty vaooo name, producing a bogus "GAERO-" output file
                if vaooo == "":
                        continue
                __aggregate_gaero_vaooo__(gaero, vaooo)
                if os.path.isfile(gaero) and os.path.isfile(vaooo):
                        os.remove(gaero)
                        os.remove(vaooo)
        flog.write("[{0}] successfully aggregated all gearo and it's matching vaooo file into one\n".format(__get_log_time_format__()))

# remove files
def __remove_files__(directory, date):
	"""Run the full post-download pipeline: extract archives, prune unwanted and
	out-of-area files, then aggregate GAERO/VAOOO pairs. Progress goes to log.txt.

	date: path of the reference-list file consumed by __remove_unused_files__.
	"""
	# with-block guarantees the log is closed even when a stage raises
	# (the original leaked the handle on any exception)
	with open("log.txt", "a") as f:
		__extract_tar_file__(f, directory)
		__extract_gz_file__(f, directory)
		__remove_unused_files__(f, directory, date)
		__remove_precise__(f, directory)
		__aggregate__(f, directory)

#------------------------------------------------------------------------------------#
# convert julian to date
def __julian_to_date__(julian):
        F, I = math.modf(float(julian))
        I = int(I)
        A = math.trunc((I - 1867216.25)/36524.25)
        if I > 2299160:
                B = I + 1 + A - math.trunc(A / 4.)
        else:
                B = I
        C = B + 1524
        D = math.trunc((C - 122.1) / 365.25)
        E = math.trunc(365.25 * D)
        G = math.trunc((C - E) / 30.6001)
        day = C - E + F - math.trunc(30.6001 * G)
        if G < 13.5:
                month = G - 1
        else:
                month = G - 13

        if month > 2.5:
                year = D - 4716
        else:
                year = D - 4715
	if len(str(int(month))) == 1:
		month = '0' + str(int(month))
	else:
		month = str(int(month))
	if len(str(int(day))) == 1:
		day = '0' + str(int(day))
	else:
		day = str(int(day))

        return str(int(year)) + str(month) + str(day), str(int(year))

# return day number in range (0-16) 
def __calculate_number_in_period__(julian):
	"""Position (1..16) of *julian* within the 16-day cycle anchored at __JULIAN_MOCK__."""
	# modulo yields 0..15; 'or 16' maps the 0 case to day 16 of the cycle
	return (julian - __JULIAN_MOCK__ + 1) % 16 or 16

# time overpass on different day
def __fill_value__(_TIME_):
	for i in range(0,16):
		_TIME_.append([])

	# assign values of each day to approriate array
	# day 1
	_TIME_[0].append("05310547")
	_TIME_[0].append("07120728")
	_TIME_[0].append("18021818")
	_TIME_[0].append("19431958")
	# day 2
	_TIME_[1].append("05120528")
	_TIME_[1].append("06540709")
	_TIME_[1].append("17431759")
	_TIME_[1].append("19251941")
	# day 3
	_TIME_[2].append("04550510")
	_TIME_[2].append("06350649")
	_TIME_[2].append("17261741")
	_TIME_[2].append("19061923")
	# day 4
	_TIME_[3].append("06170632")
	_TIME_[3].append("17121723")
	_TIME_[3].append("18491902")
	# day 5
	_TIME_[4].append("05570613")
	_TIME_[4].append("18281844")
	# day 6
	_TIME_[5].append("05380554")
	_TIME_[5].append("07190733")
	_TIME_[5].append("18101826")
	_TIME_[5].append("19502003")
	# day 7
	_TIME_[6].append("05190531")
	_TIME_[6].append("07020715")
	_TIME_[6].append("17521807")
	_TIME_[6].append("19331946")
	# day 8
	_TIME_[7].append("05020516")
	_TIME_[7].append("06410657")
	_TIME_[7].append("17331747")
	_TIME_[7].append("19121928")
	# day 9
	_TIME_[8].append("04450457")
	_TIME_[8].append("06220638")
	_TIME_[8].append("17151728")
	_TIME_[8].append("18531910")
	# day 10
	_TIME_[9].append("06030617")
	_TIME_[9].append("18351852")
	# day 11
	_TIME_[10].append("05460600")
	_TIME_[10].append("07250740")
	_TIME_[10].append("18171831")
	_TIME_[10].append("19562008")
	# day 12
	_TIME_[11].append("05250541")
	_TIME_[11].append("07060722")
	_TIME_[11].append("17561812")
	_TIME_[11].append("19371953")
	# day 13
	_TIME_[12].append("05060519")
	_TIME_[12].append("06480701")
	_TIME_[12].append("17391753")
	_TIME_[12].append("19191933")
	# day 14
	_TIME_[13].append("04500502")
	_TIME_[13].append("06300644")
	_TIME_[13].append("17201733")
	_TIME_[13].append("19011915")
	# day 15
	_TIME_[14].append("06090625")
	_TIME_[14].append("18401858")
	# day 16
	_TIME_[15].append("05510607")
	_TIME_[15].append("07320743")
	_TIME_[15].append("18231839")

# add count
def __add_count__(step):
	count = "000"
	if step >= 10:
		count = count + str(step)
	else:
		count = count + "0" + str(step)
	return count

# get acquired time from file name
def __get_acquired_time_from_filename__(filename = "", typef = "aot"):
	start_time = ""
        end_time = ""
	interval = ""
	if filename == "":
		return ""
	if typef == "aot":
		start_time = filename[21:25]
		end_time = filename[30:34]	
	elif typef == "geo":
		start_time = filename[21:25]
		end_time = filename[30:34]
	else:
		return ""	
	interval = start_time + end_time
	return interval

# get date from filename
def __get_date__(filename = ""):
	date = ""
	if filename == "":
		return 0
	date = filename[11:19]
	return date

# get start time and end time from total time
def __get_start_time__(time):
	return int(time[0:4])
def __get_end_time__(time):
	return int(time[4:8])

################## end for download process #######################################
################## begin for resampling and insertion data ############################## 
# get bounding coordinates
def __get_bounding_coor__(lad, lod):
	"""Bounding box of a granule's geolocation arrays.

	lad, lod: GDAL HDF5 subdataset strings for the Latitude/Longitude arrays.
	Returns [max lat, min lat, max lon, min lon]; -999.3 fill values are
	masked out before taking the extrema.
	"""
	lat_ds = gdal.Open(lad, GA_ReadOnly)
	lon_ds = gdal.Open(lod, GA_ReadOnly)

	lat = np.ma.masked_equal(np.array(lat_ds.ReadAsArray()), -999.3, copy=False)
	lon = np.ma.masked_equal(np.array(lon_ds.ReadAsArray()), -999.3, copy=False)

	return [np.amax(lat), np.amin(lat), np.amax(lon), np.amin(lon)]

# get other filename corresponding to original hdf5 file based on extension, this function is used in __get_other_filenames__ function
def __get_other_filename__(forg, directory, extension):
        out = ""
        files = [f for f in os.listdir(directory) if os.path.isfile(f) and f.endswith(extension)]
        for f in files:
                if forg[0:44] == f[0:44]:
                        out = f
                        break
        return out

# get all other filenames corresponding to original hdf5 file
def __get_other_filenames__(forg, directory):
        """Look up every companion artifact of an h5 product in *directory*.

        Returns (converted png, thumbnail png, metadata txt, corner txt, TIFF),
        each "" when the corresponding file is missing.
        """
        suffixes = ("convert.png", "thumbnail.png", "metadata.txt", "corner.txt", ".TIFF")
        return tuple(__get_other_filename__(forg, directory, s) for s in suffixes)

# move npp aot to saving directory
def __move_to_save_dir__(srcdir, dstdirorg, dstdirres):
	"""Move each aggregated .h5 (together with its full companion-file set) and
	each resampled .tif from *srcdir* into a per-product subdirectory of the
	corresponding destination root.

	Returns 1 on success, 0 on the first error (which is printed).
	"""
	import shutil

	# join with srcdir: the original's os.path.isfile(f) was CWD-relative
	h5files = [f for f in os.listdir(srcdir) if os.path.isfile(os.path.join(srcdir, f)) and f.endswith('.h5')]
	for f in h5files:
		try:
			idir = dstdirorg + "/" + f[:-3]
			png, pngori, meta, corner, tiff = __get_other_filenames__(f, srcdir)
			# only relocate complete sets; partial products stay behind
			if png != "" and pngori != "" and meta != "" and corner != "" and tiff != "":
				os.makedirs(idir)
				for item in (f, png, pngori, meta, corner, tiff):
					# shutil.move replaces the "mv" shell call: no
					# word-splitting if a name ever contains spaces
					shutil.move("{0}/{1}".format(srcdir, item), idir)
		except Exception as e:
			# print() form works under both Python 2 and 3
			# (the original py2-only print statement broke py3 parsing)
			print(str(e))
			return 0

	resfiles = [f for f in os.listdir(srcdir) if os.path.isfile(os.path.join(srcdir, f)) and f.endswith('.tif')]
	for f in resfiles:
		try:
			resdir = dstdirres + "/" + f[:-4]
			os.makedirs(resdir)
			shutil.move("{0}/{1}".format(srcdir, f), resdir)
		except Exception as e:
			print(str(e))
			return 0
	print("successfully move hdf and resample files to corressponding directories")
	return 1


# make dir for saving images in same year
def __make_year_dir__(year):
	"""Create <__HOME_DIR_DATA__><year> and <__HOME_DIR_RESAMPLE_DATA__><year>
	(world-writable, mode 777) if they do not already exist.

	year: year string appended directly to the configured root paths.
	"""
	ydir_org = __HOME_DIR_DATA__ + year
	ydir_res = __HOME_DIR_RESAMPLE_DATA__ + year
	for ydir in (ydir_org, ydir_res):
		if not os.path.exists(ydir):
			os.makedirs(ydir)
			# 0o777 replaces the legacy 0777 literal (a syntax error on
			# Python 3; 0o form is valid from Python 2.6 onward)
			os.chmod(ydir, 0o777)

################## end for resampling and insertion data ############################## 

