from torch.utils.data import DataLoader, Dataset
import matplotlib.pyplot as plt
import torch
from torchvision import transforms
import numpy as np
from sklearn.preprocessing import scale
from sklearn.preprocessing import minmax_scale
from sklearn.preprocessing import robust_scale
from sklearn.preprocessing import normalize
from itertools import islice
import os
import csv
import re
from datetime import datetime
from args import read_args
import pandas as pd

# Parse command-line options once at import time (side effect: reads sys.argv).
args = read_args()

# Experiment-variant switch; values seen in this file are 'constrained' and
# 'unconstrained' — presumably set via the CLI in args.py (TODO confirm).
# Selects both the data directory and whether features are z-score scaled.
if_constrained = args.if_constrained

class EUAData(Dataset):
	"""Load EUA hourly features, daily features/labels, and image embeddings.

	Builds three parallel collections:
	  - ``data_f``: sliding windows of hourly feature rows (window length
	    ``step_size`` = 27, stride 9). In the 'unconstrained' setting each
	    window is additionally extended with 3 daily feature rows, each
	    repeated 10 times.
	  - ``data_img_f``: one image-embedding float vector per line of the
	    embedding file (leading token on each line is dropped).
	  - ``data_l``: label rows — the first four value columns of each daily
	    CSV row, starting at row index 4 (kept as strings, as before).

	NOTE(review): ``__getitem__`` is intentionally not implemented; consumers
	fetch everything at once via :meth:`download_data`.
	"""

	def __init__(self, data_f_dir='/home/hlf/code/code_LSTM+CNN/data_set/' + if_constrained + '/hours.csv',
				 data_l_dir='/home/hlf/code/code_LSTM+CNN/data_set/' + if_constrained + '/days.csv',
				 data_img_f_dir='/home/hlf/code/code_LSTM+CNN/data_set/img_embed.txt'):

		def nnormalization(data_raw):
			"""Parse hourly CSV lines (header skipped) into a float matrix.

			Rows whose timestamp lies strictly inside
			2021-11-01 07:00 .. 2022-05-31 21:00 AND whose hour is 13 are
			dropped — presumably a known glitch in that period (TODO confirm).
			In the 'constrained' setting the matrix is z-score scaled
			column-wise; otherwise it is returned raw.
			"""
			lower = datetime(2021, 11, 1, 7, 0, 0)
			upper = datetime(2022, 5, 31, 21, 0, 0)
			rows = []
			for line in data_raw[1:]:
				fields = line.strip().split(',')
				stamp = datetime.strptime(fields[0], "%Y/%m/%d %H:%M")
				if lower < stamp < upper and stamp.hour == 13:
					continue
				rows.append([float(v) for v in fields[1:]])
			rows = np.array(rows)
			return scale(rows) if if_constrained == 'constrained' else rows

		def nnormalization_fordays(data_raw):
			"""Parse daily CSV lines (header skipped) into a float matrix.

			Scaled column-wise only in the 'constrained' setting.
			"""
			rows = np.array([
				[float(v) for v in line.strip().split(',')[1:]]
				for line in data_raw[1:]
			])
			return scale(rows) if if_constrained == 'constrained' else rows

		self.step_size = 27  # length of each hourly sliding window

		# Context managers close the handles; the originals were leaked.
		with open(data_f_dir, 'r') as f_hours, open(data_l_dir, 'r') as f_days:
			_data_f = f_hours.readlines()
			_data_l = f_days.readlines()

		data_f_test = nnormalization(_data_f)
		if if_constrained == 'unconstrained':
			data_f_d_test = nnormalization_fordays(_data_l)

		self.data_f = []
		data_f_d = []
		self.data_l = []
		self.data_img_f = []

		# Numeric features: overlapping windows of step_size rows, stride 9.
		for i in range(0, len(data_f_test) - self.step_size, 9):
			self.data_f.append([data_f_test[i + j] for j in range(self.step_size)])

		if if_constrained == 'unconstrained':
			# Daily features: windows of 3 consecutive days, stride 1.
			for i in range(0, len(data_f_d_test) - 3, 1):
				data_f_d.append([data_f_d_test[i + j] for j in range(3)])

			# Append each of the 3 daily rows 10x to the matching hourly
			# window (window length becomes step_size + 30).
			for i in range(len(self.data_f)):
				for day_row in data_f_d[i]:
					for _ in range(10):
						self.data_f[i].append(day_row)

		# Labels: first four value columns of each daily row from row 4 on
		# (kept as strings, matching the previous behavior).
		for line in _data_l[4:]:
			self.data_l.append(line.strip().split(',')[1:5])

		# Image embeddings: drop the leading token, parse the rest as floats.
		with open(data_img_f_dir, 'r') as f_img:
			for line in f_img:
				self.data_img_f.append([float(v) for v in line.split(' ')[1:]])

	def download_data(self):
		"""Return the (features, image_features, labels) triple in one call."""
		return self.data_f, self.data_img_f, self.data_l

	def __len__(self):
		return len(self.data_f)


class TimeSeriesDataset(Dataset):
	"""Expose pre-built numeric, image, and label arrays as a torch Dataset.

	All three arrays are cast to float32 once at construction so the
	DataLoader yields ready-to-use samples without per-item conversion.
	"""

	def __init__(self, x, img_x, y):
		self.x, self.img_x, self.y = (
			arr.astype(np.float32) for arr in (x, img_x, y)
		)

	def __len__(self):
		return self.x.shape[0]

	def __getitem__(self, idx):
		sample = self.x[idx]
		image = self.img_x[idx]
		target = self.y[idx]
		return sample, image, target



if __name__ == "__main__":
	# Smoke test. EUAData defines no __getitem__ (torch's base Dataset would
	# raise on indexing), so the original `test.__getitem__(1)[0]` crashed —
	# inspect the assembled data through download_data() instead.
	test = EUAData()
	features, img_features, labels = test.download_data()
	print(features[1][0])
	print('ok')
