﻿using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Internal;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion.Internal;
using Microsoft.Identity.Client;
using Microsoft.IdentityModel.Tokens;
using System;
using System.Collections;
using System.Net;
using System.Security.Policy;
using System.Text.RegularExpressions;
using WallpaperCrawler;
using WallpaperDemo.Models;
using static System.Net.WebRequestMethods;

namespace WallpaperDemo
{
    /// <summary>
    /// Crawls unsplash.com for wallpaper images: discovers image URLs in cached
    /// or freshly downloaded HTML pages, deduplicates them by photo id, and
    /// downloads each image on a background thread into images\&lt;key&gt;\.
    /// </summary>
    public class CrawlerService
    {
        // Index of the next locally cached page for GetOne/PraseOnePage to parse.
        public int count = 0;
        private string startUrl = "https://www.unsplash.com";

        // Unsplash search API endpoint pieces: thenUrl1 + keyword + thenUrl2 + pageNumber.
        private string thenUrl1 = "https://unsplash.com/napi/search/photos?query=";
        private string thenUrl2 = "&per_page=20&page=";

        // Pulls candidate image URLs out of srcSet attributes in rendered HTML.
        public static readonly string ImagesRegex = @"(srcSet)\s*=\s*[""'](?<url>[^""'#>\,\s]+)";

        // Pulls the "full"-size image URL out of the JSON returned by the napi search endpoint.
        public static readonly string ImagesRegexV2 = @"[""']full[""']:[""'](?<url>[^""'#>\,\s]+)";

        // Splits a photo URL into host and id segments; group "id1" is used as the image's file name.
        public static readonly string ImgPraseRegex = @"https?://(?<host>[\w\d.-]+)/photo-(?<id1>\d+)-(?<id2>\w+).+(q=(\d+))$";

        private List<UrlInfo> urlInfos = new List<UrlInfo>();
        // Photo ids already scheduled for download by PraseOnePage (cross-call dedupe).
        public Hashtable imgsComplete = new Hashtable();
        private Crawler crawler = new Crawler();
        private readonly WallpapersContext db;

        /// <summary>
        /// Seeds the crawler with the start URL and restricts it to the start host.
        /// </summary>
        /// <param name="context">EF Core context for persisting wallpaper records.</param>
        public CrawlerService(WallpapersContext context)
        {
            count = 0;
            db = context;
            crawler = new Crawler();

            // Restrict crawling to the start URL's host (anchored at end of string).
            Match m = Regex.Match(startUrl, Crawler.urlParseRegex);
            string host = m.Groups["host"].Value;
            crawler.HostFilter = host + "$";

            crawler.urls.Add(startUrl, false);
            crawler.pending.Enqueue(startUrl);
        }

        /// <summary>Starts the crawl loop on a background thread.</summary>
        public void start()
        {
            new Thread(crawler.Crawl).Start();
        }

        /// <summary>
        /// Parses cached pages until at least one image URL is known, kicks off a
        /// download of the first one, and returns the accumulated list.
        /// </summary>
        public List<UrlInfo> GetOne()
        {
            // BUG FIX: the original called urlInfos.Concat(...) and discarded the
            // result (Concat is a pure LINQ operator that mutates nothing), so
            // urlInfos never grew and this loop never terminated. AddRange
            // mutates the list in place.
            while (urlInfos.Count == 0)
            {
                urlInfos.AddRange(PraseOnePage(count));
                count++;
            }
            var first = urlInfos.First();

            new Thread(() => DownloadPic(first.Url, first.ImgName, "Default")).Start();
            return urlInfos;
        }

        /// <summary>
        /// Searches unsplash for <paramref name="key"/>, schedules downloads of every
        /// new image found on the first results page, and returns their records.
        /// </summary>
        public List<UrlInfo> GetKey(string key)
        {
            string searchUrl = "https://unsplash.com/s/photos/" + key;
            string html = FetchHtml(searchUrl, key);
            if (html == null)
            {
                return new List<UrlInfo>();
            }
            return ExtractAndDownload(html, ImagesRegex, key, new Hashtable());
        }

        /// <summary>
        /// Fetches page <paramref name="seq"/> of the napi search results for
        /// <paramref name="keyword"/> and schedules downloads of every new image.
        /// </summary>
        public List<UrlInfo> DownloadNext(string keyword, int seq)
        {
            string searchUrl = thenUrl1 + keyword + thenUrl2 + seq.ToString();
            string html = FetchHtml(searchUrl, keyword + seq.ToString());
            if (html == null)
            {
                return new List<UrlInfo>();
            }
            return ExtractAndDownload(html, ImagesRegexV2, keyword, new Hashtable());
        }

        /// <summary>
        /// Parses one crawler-cached page (file named after its sequence number),
        /// schedules downloads into the "Default" folder, and returns the new records.
        /// </summary>
        public List<UrlInfo> PraseOnePage(int seq)
        {
            // Pages are cached on disk by the crawler under their sequence number.
            string html = System.IO.File.ReadAllText(seq.ToString());
            return ExtractAndDownload(html, ImagesRegex, "Default", imgsComplete);
        }

        /// <summary>
        /// Downloads one image into images\&lt;key&gt;\&lt;name&gt;. Failures are
        /// swallowed deliberately: one broken image must not kill its worker thread.
        /// </summary>
        public void DownloadPic(string url, string name, string key)
        {
            // BUG FIX: the original @"images\\" verbatim literal embedded a doubled
            // backslash in the path; Path.Combine builds it correctly.
            string photoPath = Path.Combine("images", key);
            Directory.CreateDirectory(photoPath); // no-op when it already exists
            try
            {
                // BUG FIX: WebClient is IDisposable and was never disposed.
                using (WebClient client = new WebClient())
                {
                    client.DownloadFile(url, Path.Combine(photoPath, name));
                }
            }
            catch (Exception)
            {
                // Best-effort download; errors are intentionally ignored.
            }
        }

        // Downloads searchUrl, caches the raw body to cacheFile, and returns it as
        // text. Returns null when the server does not answer 200 OK.
        private string FetchHtml(string searchUrl, string cacheFile)
        {
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(searchUrl);
            request.KeepAlive = false;           // do not hold the connection open
            request.Timeout = 30 * 1000;         // 30 seconds, in milliseconds
            request.Method = "GET";
            request.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3";
            request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36";

            // BUG FIX: the response was never disposed, leaking the connection.
            using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
            {
                if (response.StatusCode != HttpStatusCode.OK)
                {
                    return null;
                }
                using (FileStream cache = new FileStream(cacheFile, FileMode.Create))
                {
                    response.GetResponseStream().CopyTo(cache);
                }
            }
            return System.IO.File.ReadAllText(cacheFile);
        }

        // Scans html with pattern, dedupes by photo id against 'seen', starts one
        // download thread per new image, and returns the records in "wait" state.
        private List<UrlInfo> ExtractAndDownload(string html, string pattern, string key, Hashtable seen)
        {
            List<UrlInfo> urls = new List<UrlInfo>();
            foreach (Match img in Regex.Matches(html, pattern))
            {
                string imgurl = img.Groups["url"].Value;
                Match imgMatch = Regex.Match(imgurl, ImgPraseRegex);
                string name = imgMatch.Groups["id1"].Value;
                // BUG FIX: Group.Value is never null — a failed match yields "",
                // so the original `name == null` guard never fired.
                if (string.IsNullOrEmpty(name)) continue;
                if (seen.ContainsKey(name)) continue;

                string url = imgMatch.Value;
                string fileName = name + ".jpg";
                new Thread(() => DownloadPic(url, fileName, key)).Start();

                urls.Add(new UrlInfo(url, "wait", fileName));
                seen.Add(name, true);
            }
            return urls;
        }
    }
}
