package crawler;

import helper.IOFile;

import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;

import org.htmlcleaner.CleanerProperties;
import org.htmlcleaner.HtmlCleaner;
import org.htmlcleaner.TagNode;

import databaseQueries.DatabaseQueries;

import utilities.UtilityKindKeyword;
import entity.Channel;

public class CrawlerVTC {
	private final String CONFIG_FILE = "data/vtc/Config.txt";
	// Number of consecutive days (starting today) to crawl schedules for.
	private final int SEQUENCE = 3;

	private String DES_FOLDER;
	private String CHANEL_FILE_NAME;
	private String INFOR_FILE_NAME;

	// schedule-page metadata, loaded from INFOR_FILE_NAME

	private String PUBLISHER;
	private String URL_SCHEDULE_PAGE;
	private String TEMP_URL_CHANEL;
	private String TAG_HTML;

	private ArrayList<Channel> arrayChanel;

	private IOFile ioFile;

	private UtilityKindKeyword kindKeyword;

	public CrawlerVTC() {
		/* these comments use when run on FSoft desktop to pass proxy */
		/*
		 * System.getProperties().put("http.proxyHost", "HL-proxyA");
		 * System.getProperties().put("http.proxyPort", "8080");
		 * System.getProperties().put("http.proxyUser", "antq");
		 * System.getProperties().put("http.proxyPassword", "0123999123");
		 */
		ioFile = new IOFile();
		getConfig();
		arrayChanel = new ArrayList<Channel>();
		kindKeyword = new UtilityKindKeyword();
	}

	/*
	 * Step -1: read config data.
	 * Loads output folder and data-file names from CONFIG_FILE; each line is
	 * expected in "key: value" form.
	 */
	public void getConfig() {
		ArrayList<String> arrayCofig = ioFile.readFileToArray(CONFIG_FILE, -1);
		DES_FOLDER = arrayCofig.get(0).split(": ")[1].trim();
		CHANEL_FILE_NAME = arrayCofig.get(1).split(": ")[1].trim();
		INFOR_FILE_NAME = arrayCofig.get(2).split(": ")[1].trim();
	}

	/* End Step -1: read config data */

	/*
	 * Step 0: read schedule page information.
	 * Line 0 holds the declared field count; lines 1..4 hold "key: value"
	 * pairs for publisher, schedule-page URL, channel URL template and the
	 * HTML tag to extract.
	 *
	 * @return true when the file parsed cleanly and contains at least the
	 *         declared number of fields; false otherwise.
	 */
	public boolean getSchedulePageInfor() {
		ArrayList<String> arrayContent = ioFile.readFileToArray(
				INFOR_FILE_NAME, -1);
		try {
			int countField = Integer.parseInt(arrayContent.get(0));
			// Validate the declared field count BEFORE indexing into the
			// list (the original checked after, so the check was mostly dead).
			if (arrayContent.size() < countField + 1) {
				return false;
			}
			PUBLISHER = arrayContent.get(1).split(": ")[1].trim();
			URL_SCHEDULE_PAGE = arrayContent.get(2).split(": ")[1].trim();
			TEMP_URL_CHANEL = arrayContent.get(3).split(": ")[1].trim();
			TAG_HTML = arrayContent.get(4).split(": ")[1].trim();
		} catch (Exception ex) {
			// Malformed count, missing lines or missing ": " separator.
			return false;
		}
		return true;
	}

	/* end Step 0: read schedule page information */

	/*
	 * Step 1: read channel URLs to crawl from the channel text file.
	 * The file is a header line followed by records of three lines each:
	 * name, kind, schedule-URL template.
	 * Template of URL Channel: base URL + date, e.g.
	 * http://www1.vtc.com.vn/modules/process.php?option=epg&channel_id=1&date=21/01/2014
	 */
	public void getChannels() {
		// 1. build the list of channels
		String contentChannel = ioFile.readFile(CHANEL_FILE_NAME);
		String[] arrayContentChannel = contentChannel.split("\n");
		// Guard i + 2 so a truncated trailing record cannot throw
		// ArrayIndexOutOfBoundsException.
		for (int i = 1; i + 2 < arrayContentChannel.length + 1
				&& i + 2 <= arrayContentChannel.length - 1 + 1; i += 3) {
			if (i + 2 >= arrayContentChannel.length + 1) {
				break;
			}
			if (i + 2 > arrayContentChannel.length - 1) {
				break;
			}
			String nameChannel = arrayContentChannel[i];
			String kindChannel = arrayContentChannel[i + 1];
			String URLChannel = arrayContentChannel[i + 2];
			Channel ch = new Channel(nameChannel, kindChannel, URLChannel,
					PUBLISHER);
			arrayChanel.add(ch);
		}
	}

	/*
	 * Inserts every crawled channel into the database, skipping channels
	 * that already exist (matched by name + publisher).
	 */
	public void storeListChannelDatabase() {
		// One query object for the whole batch instead of one per iteration.
		DatabaseQueries dbq = new DatabaseQueries();
		for (int i = 0; i < arrayChanel.size(); i++) {
			Channel ch = arrayChanel.get(i);
			if (dbq.checkChannelNotExist(ch.getName(), ch.getPublisher())) {
				dbq.insertTableChannel(ch.getName(), ch.getURL_Schedule(),
						ch.getKind(), ch.getPublisher(), ch.getDescription());
			}
		}
	}
	/* end Step 1: get channel URL to crawl */

	/*
	 * Step 2: crawl data from the Internet.
	 * Fetches SEQUENCE days of schedule HTML for one channel (VTC1, HBO, ...),
	 * extracts the text of every TAG_HTML element and prints the normalized
	 * result. Network/parse errors are reported on stderr and swallowed so one
	 * bad channel does not abort the whole run.
	 */
	public void crawl(Channel channel) {
		// StringBuilder instead of String += in a loop (avoids O(n^2) copying).
		StringBuilder result = new StringBuilder();
		try {
			CleanerProperties props = new CleanerProperties();
			// set some properties to non-default values
			props.setTranslateSpecialEntities(true);
			props.setTransResCharsToNCR(true);
			props.setOmitComments(true);
			for (int i = 0; i < SEQUENCE; i++) {
				String getDate = getNowDate(i);
				// getNowDate returns "DAY_OF_WEEK,dd/MM/yyyy"; only the date
				// part is appended to the schedule URL.
				String date = getDate.split(",")[1].trim();
				URL channelURL = new URL(channel.getURL_Schedule() + date);
				// clean html tags: automatically repairs broken markup
				TagNode rootNode = new HtmlCleaner(props).clean(channelURL);
				// walk every element in the cleaned document
				TagNode[] items = rootNode.getAllElements(true);
				result.append(getDate).append("\n");
				for (TagNode item : items) {
					if (item.getName().equals(TAG_HTML)) {
						result.append(nomalizeText(item.getText().toString()));
					}
				}
			}
			System.out.println(result);
			// Save File
			/*
			if (result.length() > 0) {
				ioFile.Write2File(
						DES_FOLDER + "/schedule/Schedule_" + channel.getName()
								+ ".txt", result.toString(), false);
			}*/
		} catch (Exception ex) {
			ex.printStackTrace();
		}
	}

	/*
	 * Checks connectivity to the schedule page, then crawls every channel
	 * collected by getChannels().
	 */
	public void processCrawl() {
		if (checkURLScheduleConnection(getURL_SCHEDULE_PAGE())) {
			System.out.println("Check connection OK");
			// collect the schedule of each channel
			System.out.println("Crawling....");
			for (int i = 0; i < arrayChanel.size(); i++) {
				crawl(arrayChanel.get(i));
			}
			System.out.println("Crawl Success!");
		} else {
			System.out.println("URL Die");
		}
	}
	/* end Step 2: crawl data from internet */

	public static void main(String[] args) {
		CrawlerVTC crawlerVTC = new CrawlerVTC();
		crawlerVTC.getSchedulePageInfor();
	}

	/*
	 * Support functions
	 */

	/*
	 * Returns true when URLSchedule is reachable (a connection can be opened
	 * and an input stream obtained); false on any failure.
	 */
	public boolean checkURLScheduleConnection(String URLSchedule) {
		try {
			URL url = new URL(URLSchedule);
			URLConnection conn = url.openConnection();
			// Close the stream immediately — the original leaked it.
			conn.getInputStream().close();
			return true;
		} catch (Exception ex) {
			// Unreachable/invalid URL: fall through and report failure.
		}
		return false;
	}

	public String getURL_SCHEDULE_PAGE() {
		return URL_SCHEDULE_PAGE;
	}

	/*
	 * Returns "DAY_OF_WEEK,dd/MM/yyyy" for today plus increaseDate days,
	 * with the day-of-week name in Vietnamese (e.g. "THỨ HAI,03/02/2014").
	 */
	public String getNowDate(int increaseDate) {
		String[] dayOfWeeks = { "CHỦ NHẬT", "THỨ HAI", "THỨ BA", "THỨ TƯ",
				"THỨ NĂM", "THỨ SÁU", "THỨ BẢY" };
		GregorianCalendar gCalendar = new GregorianCalendar();
		gCalendar.add(Calendar.DATE, increaseDate);
		int nowYear = gCalendar.get(Calendar.YEAR);
		// Calendar.MONTH is zero-based; %02d reproduces the manual padding.
		String nowMonthString = String.format("%02d",
				gCalendar.get(Calendar.MONTH) + 1);
		String nowDateString = String.format("%02d",
				gCalendar.get(Calendar.DATE));
		// Calendar.DAY_OF_WEEK is 1 (Sunday) .. 7 (Saturday), hence -1.
		int dayOfWeek = gCalendar.get(Calendar.DAY_OF_WEEK);
		String nowDayString = nowDateString + "/" + nowMonthString + "/"
				+ nowYear;
		return dayOfWeeks[dayOfWeek - 1] + "," + nowDayString;
	}

	/*
	 * Cleans extracted schedule text: strips "null" placeholders and the page
	 * title prefix, rewrites a couple of HTML entities, and collapses
	 * whitespace/blank lines.
	 * NOTE(review): "&nbsp;" is replaced with the literal string "null" —
	 * looks suspicious (a space seems intended) but is kept as-is; confirm
	 * against downstream consumers before changing.
	 */
	public String nomalizeText(String text) {
		String result = text.trim();
		result = result.replaceAll("null", "");
		result = result.replaceAll("Lịch phát sóng -", "");
		result = result.replaceAll("&nbsp;", "null");
		result = result.replaceAll("&amp;", "-");
		result = result.replaceAll("  ", "");
		result = result.replaceAll("\\s+\\n", " \n");
		result = result.replaceAll("\\n+", "\n");
		return result.trim();
	}
	/* end Support functions */
}
