﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;

namespace QSearcher.QServicer.BLL
{
    /// <summary>
    /// Coordinates a database-backed crawl queue (LINQ to SQL).
    /// URLs move through a small state machine:
    ///   Init -> RequestLock -> RequestResult -> AnalyzeLock -> AnalyzeResult
    ///   Init -> DownloadLock -> DownloadResult
    /// NOTE(review): none of the "take and lock" operations are atomic — two
    /// concurrent callers could lock the same row. Safe only for a single-worker
    /// deployment; confirm before scaling out.
    /// </summary>
    public class Crawler
    {
        /// <summary>Kind of work a queued URL represents.</summary>
        public enum BizType { Request, Download }

        /// <summary>Lifecycle states for a queued URL.</summary>
        public enum State { Init, RequestLock, RequestResult, AnalyzeLock, AnalyzeResult, DownloadLock, DownloadResult }

        // One shared data context per Crawler instance (DataContext is not thread-safe).
        readonly DAL.CrawlerDataContext db = new DAL.CrawlerDataContext();

        /// <summary>
        /// Recovery routine: releases rows left in a *Lock state by a previous run
        /// that died mid-work, returning each to the state it was taken from.
        /// </summary>
        public void Init()
        {
            var stuck = from p in db.Crawler_Url
                        where p.State == (int)State.RequestLock
                           || p.State == (int)State.AnalyzeLock
                           || p.State == (int)State.DownloadLock
                        select p;
            foreach (var item in stuck)
            {
                // A row is in exactly one state; switch avoids the original's
                // chained ifs re-testing a field mutated by an earlier branch.
                switch ((State)item.State)
                {
                    case State.RequestLock: item.State = (int)State.Init; break;
                    case State.AnalyzeLock: item.State = (int)State.RequestResult; break;
                    case State.DownloadLock: item.State = (int)State.Init; break;
                }
                item.UpdateTime = DateTime.Now;
            }
            db.SubmitChanges();
        }

        /// <summary>
        /// Inserts a URL into the queue unless the (crawler, url) pair already exists.
        /// </summary>
        private void AddUrl(string crawler, BizType bizType, string url)
        {
            // Any() translates to EXISTS instead of the original COUNT(*) > 0.
            var exists = (from p in db.Crawler_Url
                          where p.Crawler == crawler && p.Url == url
                          select p).Any();
            if (exists) return;
            db.Crawler_Url.InsertOnSubmit(new DAL.Crawler_Url()
            {
                ID = Guid.NewGuid().ToString(),
                BizType = (int)bizType,
                Crawler = crawler,
                CreateTime = DateTime.Now,
                UpdateTime = DateTime.Now,
                State = (int)State.Init,
                Tag = "",
                Url = url,
            });
            db.SubmitChanges();
        }

        /// <summary>Queues a URL for the request pipeline.</summary>
        public void AddRequestUrl(string crawler, string url)
        {
            AddUrl(crawler, BizType.Request, url);
        }

        /// <summary>
        /// Takes the next pending request URL and marks it RequestLock.
        /// Returns a sentinel row (Code == -1, Msg == "not find.") when empty.
        /// </summary>
        public DAL.Crawler_Url GetRequestUrl()
        {
            return TakeNext(BizType.Request, State.RequestLock);
        }

        /// <summary>Marks a request as completed and stores its result tag.</summary>
        public void SendRequestResult(string id, string tag = "")
        {
            SetResult(id, State.RequestResult, tag);
        }

        /// <summary>Queues a URL for the download pipeline.</summary>
        public void AddDownloadUrl(string crawler, string url)
        {
            AddUrl(crawler, BizType.Download, url);
        }

        /// <summary>
        /// Takes the next pending download URL and marks it DownloadLock.
        /// Returns a sentinel row (Code == -1, Msg == "not find.") when empty.
        /// </summary>
        public DAL.Crawler_Url GetDownloadUrl()
        {
            return TakeNext(BizType.Download, State.DownloadLock);
        }

        /// <summary>Marks a download as completed and stores its result tag.</summary>
        public void SendDownloadResult(string id, string tag = "")
        {
            SetResult(id, State.DownloadResult, tag);
        }

        /// <summary>
        /// Takes the next fetched-but-unanalyzed page for the given crawler
        /// (state RequestResult with an empty Tag) and marks it AnalyzeLock.
        /// NOTE(review): a request completed via SendRequestResult with a
        /// non-empty tag will never match here — confirm that is intentional.
        /// </summary>
        public DAL.Crawler_Url GetAnalyzeSource(string crawler)
        {
            // Single round-trip: the original ran Count() then First() (two queries).
            var res = (from p in db.Crawler_Url
                       where p.Crawler == crawler
                          && p.BizType == (int)BizType.Request
                          && p.State == (int)State.RequestResult
                          && p.Tag == ""
                       select p).FirstOrDefault();
            if (res == null) return new DAL.Crawler_Url() { Code = -1, Msg = "not find." };
            res.Code = 0;
            res.Msg = "success.";
            res.State = (int)State.AnalyzeLock;
            res.UpdateTime = DateTime.Now;
            db.SubmitChanges();
            return res;
        }

        /// <summary>
        /// Records the outcome of analyzing a page: marks the source row
        /// AnalyzeResult, then enqueues any discovered request/download URLs.
        /// Unknown ids are ignored (the original threw InvalidOperationException).
        /// </summary>
        public void SendAnalyzeResult(string id, string[] requestUrls, string[] downloadUrls, string tag = "")
        {
            var sender = (from p in db.Crawler_Url where p.ID == id select p).FirstOrDefault();
            if (sender == null) return; // unknown id: nothing to record
            sender.State = (int)State.AnalyzeResult;
            sender.Tag = tag;
            sender.UpdateTime = DateTime.Now;
            // Commit the sender's transition before enqueuing children: AddUrl
            // itself calls SubmitChanges, so the original committed this change
            // as a side effect of whichever AddUrl happened to flush first.
            db.SubmitChanges();
            if (requestUrls != null)
                foreach (var url in requestUrls) AddRequestUrl(sender.Crawler, url);
            if (downloadUrls != null)
                foreach (var url in downloadUrls) AddDownloadUrl(sender.Crawler, url);
        }

        // --- private helpers -------------------------------------------------

        /// <summary>
        /// Takes the first Init row of the given biz type and moves it to
        /// <paramref name="lockState"/>. Shared by GetRequestUrl/GetDownloadUrl,
        /// which were copy-paste duplicates differing only in two enum values.
        /// </summary>
        private DAL.Crawler_Url TakeNext(BizType bizType, State lockState)
        {
            var res = (from p in db.Crawler_Url
                       where p.BizType == (int)bizType && p.State == (int)State.Init
                       select p).FirstOrDefault();
            if (res == null) return new DAL.Crawler_Url() { Code = -1, Msg = "not find." };
            res.Code = 0;
            res.Msg = "success.";
            res.State = (int)lockState;
            res.UpdateTime = DateTime.Now;
            db.SubmitChanges();
            return res;
        }

        /// <summary>
        /// Moves every row with the given id to <paramref name="resultState"/>
        /// and stores the tag. Shared by SendRequestResult/SendDownloadResult.
        /// </summary>
        private void SetResult(string id, State resultState, string tag)
        {
            var rows = from p in db.Crawler_Url where p.ID == id select p;
            foreach (var item in rows)
            {
                item.State = (int)resultState;
                item.Tag = tag;
                item.UpdateTime = DateTime.Now;
            }
            db.SubmitChanges();
        }
    }
}