﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data.SqlClient;
using System.Data;

namespace Craweler
{
    /// <summary>
    /// A lightweight crawler: reads cached HTML pages from the CrawlPages
    /// database, extracts the outbound links of each page, and queues
    /// previously unseen .htm/.html URLs into FastUrlTable for crawling.
    /// </summary>
    class Program
    {
        /// <summary>Connection string for the crawl database (integrated security).</summary>
        private const string ConnectionString =
            @"Data Source=gene-f\bandik;database=CrawlPages;integrated security=SSPI";

        /// <summary>Maximum cached-page size we are willing to buffer (10 MB).</summary>
        private const int MaxPageBytes = 1024 * 1024 * 10;

        static void Main(string[] args)
        {
            ExtractAllUrlFromCurrentHtml();
        }

        static void PrintUsage()
        {
            Console.WriteLine("Crawler.exe <domainUrl> <>");
        }

        #region Extract Urls

        /// <summary>
        /// Iterates over every cached text/html page, decodes it with the
        /// charset advertised in its content-type header, extracts its links,
        /// and inserts new .htm/.html URLs into FastUrlTable.
        /// </summary>
        static void ExtractAllUrlFromCurrentHtml()
        {
            // Two connections are required: the first holds an open data reader
            // for the duration of the scan, while the second performs the
            // existence checks and inserts issued inside the read loop.
            // FIX: all connections/commands/readers are now disposed via using,
            // so an exception mid-scan no longer leaks them.
            using (SqlConnection readConn = new SqlConnection(ConnectionString))
            using (SqlConnection writeConn = new SqlConnection(ConnectionString))
            {
                readConn.Open();
                writeConn.Open();

                using (SqlCommand getElement = new SqlCommand(
                    @"select * from webpages where contenttype like 'text/html%'", readConn))
                {
                    // Reused scratch buffer for the cached page bytes (column 3).
                    byte[] buf = new byte[MaxPageBytes];
                    int i = 0;

                    using (SqlDataReader sdr = getElement.ExecuteReader())
                    {
                        while (sdr.Read())
                        {
                            string url = sdr.GetString(0);
                            // Skip oversized URLs, plain-text resources, and the
                            // one domain we deliberately do not follow.
                            if (url.Length > 200) continue;
                            if (url.EndsWith(".txt")) continue;
                            if (GetDomainUrl(url) == "dada360.com") continue;

                            Console.WriteLine(i++); // progress counter

                            string ip = sdr.GetString(1);
                            string contentType = sdr.GetString(2).ToLower();
                            if (string.IsNullOrEmpty(contentType)) continue;

                            // Choose the decoder per row.
                            // FIX: the original declared 'encoding' outside the loop,
                            // so a row without a charset token silently reused the
                            // previous row's encoding; it also decoded iso-8859-1
                            // with 7-bit Encoding.ASCII, corrupting bytes >= 0x80.
                            Encoding encoding = Encoding.Default;
                            if (contentType.Contains("utf-8"))
                            {
                                encoding = Encoding.UTF8;
                            }
                            else if (contentType.Contains("iso-8859-1"))
                            {
                                encoding = Encoding.GetEncoding("iso-8859-1");
                            }
                            else if (contentType.Contains("gb2312"))
                            {
                                encoding = Encoding.GetEncoding("gb2312");
                            }

                            long len = sdr.GetBytes(3, 0, buf, 0, MaxPageBytes);
                            string cachedContent = encoding.GetString(buf, 0, (int)len);

                            string domainUrl = GetDomainUrl(url);
                            if (domainUrl == string.Empty)
                            {
                                Console.WriteLine("unexpected url: " + url);
                            }

                            string[] links = URLinkExtractor.ExtractLinks(url, cachedContent, domainUrl);
                            foreach (string s in links)
                            {
                                // Queue only HTML pages we have not seen before.
                                if (!CheckUrlExists(s, writeConn) && (s.EndsWith("html") || s.EndsWith("htm")))
                                {
                                    InsertNewUrlForCrawel(s, ip, writeConn);
                                }
                            }
                        }
                    }
                }
            }
        }

        // Whitelist of crawlable domains; also used to map a URL to its domain.
        private static string[] domains = new string[]{"360kxr.com"
,"jxdyf.com"
,"yaofang.cn"
,"818.com"
,"star365.com"
,"daoyao.com"
,"jianke.com"
,"818shyf.com"
,"huihao.com"
,"dada360.com"
,"bishengyuan.com"
,"yumi100.com"
,"kangtu.com"
,"5.cc"
,"sonmai.cn"
,"boheshop.com"
,"yeecare.com"
,"lvshou.com"
,"99vk.com"
,"haoshili.com.cn"};

        /// <summary>
        /// Returns the first known domain contained in <paramref name="url"/>,
        /// or the empty string when the URL matches none of the crawl domains.
        /// </summary>
        static string GetDomainUrl(string url)
        {
            foreach (string s in domains)
            {
                if (url.Contains(s))
                    return s;
            }
            return string.Empty;
        }

        /// <summary>
        /// Queues a newly discovered URL (hasdoc=0) for crawling.
        /// FIX: uses SQL parameters instead of string.Format — crawled URLs are
        /// untrusted input, and any URL containing a quote broke the statement.
        /// Failures (e.g. duplicates) are logged and skipped, best-effort.
        /// </summary>
        static void InsertNewUrlForCrawel(string url, string ip, SqlConnection conn)
        {
            using (SqlCommand sqlcom = new SqlCommand(
                "insert into FastUrlTable(url,ip,hasdoc) values(@url,@ip,0)", conn))
            {
                sqlcom.Parameters.AddWithValue("@url", url);
                sqlcom.Parameters.AddWithValue("@ip", ip);
                try
                {
                    sqlcom.ExecuteNonQuery();
                }
                catch (Exception e)
                {
                    // Best effort: log and continue crawling.
                    Console.WriteLine(e.Message);
                }
            }
        }

        /// <summary>
        /// Returns true when <paramref name="url"/> is already queued in
        /// FastUrlTable. FIX: parameterized query (was string.Format with
        /// untrusted input). On a query error, logs and reports "not present"
        /// so the scan can continue.
        /// </summary>
        static bool CheckUrlExists(string url, SqlConnection conn)
        {
            using (SqlCommand checkElement = new SqlCommand(
                "select count(*) from fasturltable where url=@url", conn))
            {
                checkElement.Parameters.AddWithValue("@url", url);
                try
                {
                    return (int)checkElement.ExecuteScalar() > 0;
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                    return false;
                }
            }
        }

        #endregion

        #region FetchUrlContent



        #endregion
    }
}
