﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Crawl;
using System.Net;
using System.Threading;
using System.Text.RegularExpressions;
using Crawl.Ext;
using System.IO;
using System.Reflection;
using Util.Net.Http;

namespace Img.Service
{
    /// <summary>
    /// Singleton service that drives the image-crawl job: restores/persists crawl
    /// history, loads seed URLs from a file, and spins up <c>ImgCrawler</c> workers
    /// (optionally behind validated proxies).
    /// </summary>
    class CrawlService
    {
        /// <summary>Callback used to surface progress messages to the caller/UI.</summary>
        public delegate void Log(string msg);

        /// <summary>Process-wide singleton, created eagerly by the static constructor.</summary>
        public static CrawlService Instance { private set; get; }

        // NOTE(review): unused in this file — kept for source compatibility; confirm no reflective use before removing.
        private static int CrawlerCount = 4;
        // Failed-request budget handed to each ImgCrawler (name kept; "Failer" is a historical typo).
        private static int MaxFailerTime_Img = 100000;

        private Job job { get; set; }
        private bool IsInit { get; set; }
        private bool IsStart { get; set; }

        private ProxyService ProxyServ { get; set; }
        private event Log logger;
        private Log log;

        // Path of the file that persists ignored URLs between runs (set in Init).
        private string ignoreFile;

        static CrawlService()
        {
            // BUG FIX: 'new' can never return null, so the original null check was dead code.
            Instance = new CrawlService();
        }

        /// <summary>
        /// Initializes the crawl job, optionally wiring a log sink, and — depending on
        /// UI flags — restores crawl history and initializes the proxy service.
        /// Idempotent: returns true immediately when already initialized.
        /// </summary>
        /// <param name="log">Optional log sink; may be null.</param>
        /// <returns>false only when the proxy service is required but unavailable.</returns>
        public bool Init(Log log = null)
        {
            if (this.IsInit)
            {
                return true;
            }
            this.job = new Job(new JobContext());
            this.log = log;
            this.logger += log;
            this.ignoreFile = Path.Combine(this.job.Context.Config.MainDir, "ignore.urls.txt");
            if (FrmImg.IsReadHis)
            {
                // BUG FIX: invoke the event with '?.' — 'logger' is null when no sink
                // was supplied (log defaults to null), which previously threw an NRE here.
                this.logger?.Invoke("读取记录开始");
                this.job.jobSave.LoadJob();
                if (File.Exists(this.ignoreFile))
                {
                    IEnumerable<string> ignoredUrls = File.ReadAllLines(this.ignoreFile);
                    this.job.Context.Jobs.AddIgnoredUrls(ignoredUrls);
                }
                this.logger?.Invoke("读取记录完毕");
            }
            if (FrmImg.IsUseProxy)
            {
                this.ProxyServ = ProxyService.Instance;
                if (this.ProxyServ == null || !this.ProxyServ.Init())
                {
                    return false;
                }
            }
            this.IsInit = true;
            return true;
        }

        /// <summary>
        /// Stops the crawl and persists state on a background thread.
        /// Note the save runs asynchronously: IsInit stays true until it completes,
        /// so a caller re-entering Init immediately may race the teardown.
        /// </summary>
        /// <returns>false when the service was never initialized.</returns>
        public bool Destroy()
        {
            if (!this.IsInit)
            {
                return false;
            }
            this.Stop();
            new Thread(delegate()
            {
                this.logger?.Invoke("保存记录开始");
                this.job.jobSave.SaveAll();
                List<string> ignoredUrls = JobSave.GetIgnoredUrls(this.job.Context.Catalog.Dir.FullName + "/www.Img.cn/tiku");
                // Only persist once the ignore list is large enough to be worth a write.
                if (ignoredUrls.Count > 1000)
                {
                    File.WriteAllLines(this.ignoreFile, ignoredUrls);
                }
                this.logger?.Invoke("保存记录完毕");
                this.logger -= this.log;
                this.job = null;
                this.IsInit = false;
            }).Start();
            return true;
        }

        /// <summary>
        /// Loads seed URLs from <paramref name="SeedFile"/> (deduplicated, each with
        /// <paramref name="referUrl"/> as referer) and starts FrmImg.ImgCrawlerNum crawlers
        /// on a background thread.
        /// </summary>
        /// <param name="referUrl">Referer applied to every seed URL.</param>
        /// <param name="SeedFile">Path to a text file with one URL per line.</param>
        /// <param name="crawlerEvents">Event sinks wired onto each crawler.</param>
        /// <returns>false when not initialized, already started, or the seed file is missing.</returns>
        public bool Start(string referUrl, string SeedFile, CrawlerEvents crawlerEvents)
        {
            if (!this.IsInit || this.IsStart)
            {
                return false;
            }
            if (!File.Exists(SeedFile))
            {
                this.logger?.Invoke("链接文件不存在");
                return false;
            }

            this.IsStart = true;
            // BUG FIX: the original used Action.BeginInvoke(null, null) with no matching
            // EndInvoke — that leaks the IAsyncResult and throws on .NET Core. Queue the
            // work onto the thread pool instead (same fire-and-forget semantics).
            ThreadPool.QueueUserWorkItem(delegate
            {
                this.logger?.Invoke("开始加载链接种子");
                string[] urlLines = File.ReadAllLines(SeedFile);
                if (urlLines.Length < 1)
                {
                    this.logger?.Invoke("文件为空!!!");
                    return;
                }
                CrawlUrl refUrl = new CrawlUrl(referUrl);
                // Deduplicate seed lines before wrapping them as crawl URLs.
                IEnumerable<CrawlUrl> urls = urlLines.Distinct().Select(t =>
                {
                    CrawlUrl url = new CrawlUrl(t);
                    url.Referer = refUrl;
                    return url;
                });

                this.job.Context.Jobs.MapQueue("img").AddLasts(urls);
                this.logger?.Invoke("链接种子加载完成");

                for (int i = 0; i < FrmImg.ImgCrawlerNum; i++)
                {
                    Session session = FrmImg.IsUseProxy
                        ? this.getNewSession(true, referUrl, false)
                        : new Session();
                    // BUG FIX: getNewSession returns null when no working proxy could be
                    // obtained; the original handed that null session straight to a crawler.
                    if (session == null)
                    {
                        this.logger?.Invoke("获取代理失败，跳过一个爬虫");
                        continue;
                    }
                    // 启动图片爬虫 (start an image crawler worker)
                    ImgCrawler imgCrawler = this.CreateImageCrawlers(session, crawlerEvents);
                    this.job.Crawlers.Add(imgCrawler);
                    imgCrawler.AsynStart();
                }
            });
            return true;
        }

        /// <summary>Stops the running job; no-op when not initialized or not started.</summary>
        public void Stop()
        {
            if (!this.IsInit || !this.IsStart)
            {
                return;
            }
            this.job.Stop();
            this.IsStart = false;
        }

        /// <summary>
        /// Creates a fresh session (new cookie container). When <paramref name="isUserProxy"/>
        /// is true, attaches a proxy validated against <paramref name="testUrl"/>.
        /// </summary>
        /// <returns>The session, or null when a proxy was required but none could be obtained.</returns>
        public Session getNewSession(bool isUserProxy, string testUrl, bool isPost)
        {
            Session session = new Session("imgCrawler", new CookieContainer());

            if (isUserProxy)
            {
                WebProxy proxy = this.getValidWebProxy(testUrl, isPost);
                if (proxy == null)
                {
                    return null;
                }
                session.Proxy = proxy;
            }

            return session;
        }

        /// <summary>
        /// Blocks until a proxy that can successfully fetch <paramref name="testUrl"/> is
        /// obtained. Returns null when the service is uninitialized or is torn down mid-wait.
        /// </summary>
        private WebProxy getValidWebProxy(string testUrl, bool isPost)
        {
            // BUG FIX: the original retried failed proxies by recursing into itself, which
            // can overflow the stack under sustained failure; retry with a loop instead.
            while (this.IsInit)
            {
                WebProxy proxy = null;
                while (this.IsInit)
                {
                    if ((proxy = this.ProxyServ.GetProxy(5000)) != null)
                    {
                        break;
                    }
                }
                // BUG FIX: the original continued with a null proxy when IsInit flipped
                // false during the wait; bail out cleanly instead.
                if (proxy == null)
                {
                    return null;
                }
                HttpClient client = Helper.BuildHttpClient();
                client.Proxy = proxy;
                try
                {
                    // Probe the proxy with a real request; any exception marks it unusable.
                    if (isPost)
                    {
                        client.PostResponseString(testUrl);
                    }
                    else
                    {
                        client.GetResponseString(testUrl);
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine("【获取代理失败】|" + e.Message);
                    continue; // try the next proxy
                }
                Console.WriteLine("【获取代理成功】|");
                return proxy;
            }
            return null;
        }

        /// <summary>Stub — paper-system entry is not implemented; always false.</summary>
        public bool EnterPaperSystem(Crawler crawler)
        {
            return false;
        }

        /// <summary>
        /// Builds one ImgCrawler bound to the "img" queue, wired to the supplied event
        /// sinks and configured with the service-wide retry/delay limits.
        /// </summary>
        private ImgCrawler CreateImageCrawlers(Session session, CrawlerEvents crawlerEvents)
        {
            ImgCrawler imgCrawler = new ImgCrawler(this.job.Context, this.job.Context.Jobs.MapQueue("img"));
            imgCrawler.CrawlDelay = 100;                // ms between requests
            imgCrawler.MaxWaitTime = 360000 * 10;       // 1h max wait for queue items
            imgCrawler.MaxRequestCount = Int32.MaxValue;
            imgCrawler.MaxFailedCount = MaxFailerTime_Img;
            imgCrawler.onCrawl += crawlerEvents.OnCrawl;
            imgCrawler.onStart += crawlerEvents.OnStart;
            imgCrawler.onStop += crawlerEvents.OnStop;
            imgCrawler.Session = session;
            return imgCrawler;
        }
    }
}

