﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using BMallDBAccess;
using Common;
using System.Data;
using MySql.Data.MySqlClient;
using System.Collections;
using System.Diagnostics;

namespace SpiderServer
{
	/// <summary>
	/// Database-backed configuration lookups for the crawler, with simple
	/// in-memory caching of job definitions and per-file field definitions.
	/// All public members are static and safe to call from multiple threads.
	/// </summary>
	class Configs
	{
		// Cache of job definitions keyed by job id. Guarded by s_JobInfoGate.
		// A null value is cached deliberately for unknown job ids (negative caching).
		private static Dictionary<int, CrawlJobInfo> s_JobInfoMap = new Dictionary<int, CrawlJobInfo>();
		// Cache of field definitions keyed by file id. Guarded by s_FileFieldGate.
		private static Dictionary<int, CrawlJobFileFieldInfo[]> s_FileFieldInfoMap = new Dictionary<int, CrawlJobFileFieldInfo[]>();
		// Dedicated lock objects. Previously the readers locked
		// (dict as ICollection).SyncRoot while UpdateJob/UpdateJobFile called
		// Remove with no lock at all — Dictionary is not thread-safe for
		// concurrent read/write, so that was a race. A private gate shared by
		// every access point fixes it and avoids the discouraged SyncRoot idiom.
		private static readonly object s_JobInfoGate = new object();
		private static readonly object s_FileFieldGate = new object();

		/// <summary>
		/// Loads a job row and its associated file rows straight from the
		/// database, bypassing the cache. Returns null when the job id is unknown.
		/// </summary>
		private static CrawlJobInfo InternalGetCrawlJobById(int jobId)
		{
			CrawlJobInfo job = null;
			using (IDataReader dr = Global.DBAccess.ExecuteReader("select * from crawl_job where job_id=@job_id", new MySqlParameter("job_id", jobId)))
			{
				if (dr.Read())
				{
					job = CrawlJobInfo.FillData(dr);
				}
			}

			if (job == null)
			{
				return null;
			}

			// Attach the job's file definitions before returning.
			using (IDataReader dr = Global.DBAccess.ExecuteReader("select * from crawl_job_file where job_id=@job_id", new MySqlParameter("job_id", jobId)))
			{
				job.Files = CrawlJobFileInfo.FillDataCollection(dr);
			}
			return job;
		}

		/// <summary>
		/// Fetches a single crawl-job file row by its primary key.
		/// Not cached. Returns null when the file id is unknown.
		/// </summary>
		public static CrawlJobFileInfo GetCrawlFileById(int fileId)
		{
			CrawlJobFileInfo file = null;
			using (IDataReader dr = Global.DBAccess.ExecuteReader("select * from crawl_job_file where file_id=@file_id", new MySqlParameter("file_id", fileId)))
			{
				if (dr.Read())
				{
					file = CrawlJobFileInfo.FillData(dr);
				}
			}
			return file;
		}

		/// <summary>
		/// Returns all jobs whose status marks them as started (status = 1).
		/// Always reads the database; never cached.
		/// </summary>
		public static CrawlJobInfo[] GetStartCrawlJobs()
		{
			using (IDataReader dr = Global.DBAccess.ExecuteReader("select * from crawl_job where status in(1)"))
			{
				return CrawlJobInfo.FillDataCollection(dr);
			}
		}

		/// <summary>
		/// Convenience overload of <see cref="GetPageData(string,int,string)"/>
		/// that records no file-id association (fileId = 0).
		/// </summary>
		public static string GetPageData(string url, string encoding)
		{
			return GetPageData(url, 0, encoding);
		}

		/// <summary>
		/// Returns the page body for <paramref name="url"/>, serving from the
		/// crawl_data_page table when a row for the URL's hash exists, otherwise
		/// downloading the page, persisting it, and (when fileId &gt; 0) logging
		/// the fetch against the file. Returns null when the download fails.
		/// </summary>
		/// <param name="url">Page URL to fetch.</param>
		/// <param name="fileId">Crawl-file id to log the fetch under; 0 disables logging.</param>
		/// <param name="encoding">Character encoding passed to the HTTP wrapper.</param>
		public static string GetPageData(string url, int fileId, string encoding)
		{
			// NOTE(review): string.GetHashCode is not guaranteed stable across
			// runtimes/processes (it is randomized per process on .NET Core), so
			// rows keyed this way may become unreachable after an upgrade and
			// distinct URLs can collide. A stable digest (e.g. SHA-256 of the
			// URL) would be safer, but changing it now would orphan existing
			// rows — TODO confirm deployment runtime before migrating.
			object obj = Global.DBAccess.ExecuteScalar("select data from crawl_data_page where hash=@hash", new MySqlParameter("hash", url.GetHashCode()));
			string data = obj != null ? obj.ToString() : null;
			if (data == null)
			{
				Stopwatch sw = new Stopwatch();
				sw.Start();
				HttpURLConnectionWrapper http = new HttpURLConnectionWrapper(url);
				data = http.GetWeb(url, encoding, null);
				sw.Stop();
				if (data != null)
				{
					int hash = url.GetHashCode();
					// "cpu_time" actually stores wall-clock download time in ms.
					Global.DBAccess.ExecuteNonQuery("insert into crawl_data_page(hash,url,cpu_time,data,created,modified) values(@hash,@url,@cpu_time,@data,@created,@modified)",
						new MySqlParameter("hash", hash),
						new MySqlParameter("url", url),
						new MySqlParameter("cpu_time", sw.Elapsed.TotalMilliseconds),
						new MySqlParameter("data", data),
						new MySqlParameter("created", DateTime.Now),
						new MySqlParameter("modified", DateTime.Now)
					);

					if (fileId > 0)
					{
						Global.DBAccess.ExecuteNonQuery("insert into crawl_job_page_log(hash,file_id) values(@hash,@file_id)", 
							new MySqlParameter("hash", hash),
							new MySqlParameter("file_id", fileId)
						);
					}
				}
			}
			return data;
		}

		/// <summary>
		/// Returns the job definition for <paramref name="jobId"/>, loading it
		/// from the database on first request and caching it (including null
		/// for unknown ids) until <see cref="UpdateJob"/> invalidates it.
		/// </summary>
		public static CrawlJobInfo GetCrawlJobById(int jobId)
		{
			lock (s_JobInfoGate)
			{
				CrawlJobInfo job;
				if (s_JobInfoMap.TryGetValue(jobId, out job))
				{
					return job;
				}

				job = InternalGetCrawlJobById(jobId);
				s_JobInfoMap.Add(jobId, job);
				return job;
			}
		}

		/// <summary>
		/// Loads the field definitions for a crawl file straight from the
		/// database, bypassing the cache.
		/// </summary>
		private static CrawlJobFileFieldInfo[] InternalGetCrawlFileFieldsByFileId(int fileId)
		{
			using (IDataReader dr = Global.DBAccess.ExecuteReader("select * from crawl_file_field where file_id=@file_id", new MySqlParameter("file_id", fileId)))
			{
				return CrawlJobFileFieldInfo.FillDataCollection(dr);
			}
		}

		/// <summary>
		/// Returns the field definitions for <paramref name="fileId"/>, loading
		/// them on first request and caching until <see cref="UpdateJobFile"/>
		/// invalidates them.
		/// </summary>
		public static CrawlJobFileFieldInfo[] GetCrawlFileFieldsByFileId(int fileId)
		{
			lock (s_FileFieldGate)
			{
				CrawlJobFileFieldInfo[] fields;
				if (s_FileFieldInfoMap.TryGetValue(fileId, out fields))
				{
					return fields;
				}

				fields = InternalGetCrawlFileFieldsByFileId(fileId);
				s_FileFieldInfoMap.Add(fileId, fields);
				return fields;
			}
		}

		/// <summary>
		/// Invalidates the cached definition for a job so the next
		/// <see cref="GetCrawlJobById"/> reloads it from the database.
		/// </summary>
		public static void UpdateJob(int jobId)
		{
			// Must hold the same lock as the readers: Remove on a Dictionary
			// racing with TryGetValue/Add is undefined behavior.
			lock (s_JobInfoGate)
			{
				s_JobInfoMap.Remove(jobId);
			}
		}

		/// <summary>
		/// Invalidates the cached field definitions for a crawl file so the next
		/// <see cref="GetCrawlFileFieldsByFileId"/> reloads them.
		/// </summary>
		public static void UpdateJobFile(int fileId)
		{
			lock (s_FileFieldGate)
			{
				s_FileFieldInfoMap.Remove(fileId);
			}
		}
	}
}
