﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
using System.IO;
using CrawlFish.Parsing;

namespace CrawlFish.Web
{
    /// <summary>
    /// Breadth-first web crawler: starting from a seed URL it fetches pages,
    /// follows discovered links until the frontier is empty or the configured
    /// sanity cap on crawled pages is reached, then serializes all results to
    /// a tab-separated output file.
    /// </summary>
    public class Crawler
    {
        /// <param name="config">Crawl settings; <c>Sanity</c> caps the number of pages fetched.</param>
        /// <param name="outputPath">File path the serialized results are written to (overwritten each run).</param>
        public Crawler(CrawlConfig config, string outputPath)
        {
            _config = config;
            _outputPath = outputPath;
        }

        /// <summary>
        /// Crawls breadth-first from <paramref name="url"/>, collecting at most
        /// <c>_config.Sanity</c> pages, then writes the results to <c>_outputPath</c>.
        /// Unreachable pages are skipped and the crawl continues.
        /// </summary>
        /// <param name="url">Seed URL to start crawling from.</param>
        public void Crawl(string url)
        {
            Queue<string> pending = new Queue<string>();
            // O(1) dedup of every URL ever enqueued; the original Queue.Contains
            // scan was O(n) per link and never filtered out already-crawled URLs.
            HashSet<string> seen = new HashSet<string>();
            Dictionary<string, CrawlResult> results = new Dictionary<string, CrawlResult>();

            pending.Enqueue(url);
            seen.Add(url);

            while (pending.Count > 0 && results.Count < _config.Sanity)
            {
                string target = pending.Dequeue();
                if (results.ContainsKey(target))
                {
                    continue;
                }
                try
                {
                    CrawlResult result = _getResult(target);
                    foreach (string link in result.Links)
                    {
                        // HashSet.Add returns false for duplicates, so each URL
                        // enters the frontier at most once.
                        if (seen.Add(link))
                        {
                            pending.Enqueue(link);
                        }
                    }
                    results.Add(target, result);
                }
                catch (WebException)
                {
                    // Best-effort crawl: a failed fetch (DNS, timeout, HTTP error)
                    // just drops this page; a retry policy could be added here.
                }
            }

            _processResults(results);
        }

        /// <summary>
        /// Writes every crawled page as one "url:{key}\t{serialized}" line to
        /// <c>_outputPath</c>, overwriting any previous file.
        /// </summary>
        private void _processResults(Dictionary<string, CrawlResult> results)
        {
            // using guarantees the writer is flushed and closed even if a
            // WriteLine throws (the original skipped Close() on exception).
            using (StreamWriter writer = new StreamWriter(_outputPath, false))
            {
                foreach (KeyValuePair<string, CrawlResult> result in results)
                {
                    writer.WriteLine(String.Format("url:{0}\t{1}", result.Key, result.Value.GetSerialized()));
                }
            }
        }

        /// <summary>
        /// Fetches <paramref name="target"/>, parses its markup, and returns the
        /// extracted content, links, and raw markup as a <see cref="CrawlResult"/>.
        /// </summary>
        /// <exception cref="WebException">Propagated when the request fails; handled by the caller.</exception>
        private CrawlResult _getResult(string target)
        {
            // WebRequest.Create, not HttpWebRequest.Create: the factory is a
            // static member of the base class and behaves identically.
            WebRequest request = WebRequest.Create(target);
            // Dispose the response and reader deterministically — the original
            // leaked both, exhausting the connection pool on large crawls.
            using (WebResponse response = request.GetResponse())
            using (StreamReader reader = new StreamReader(response.GetResponseStream()))
            {
                string markup = reader.ReadToEnd();
                HtmlParser parser = new HtmlParser(markup);
                CrawlResult result = new CrawlResult();
                result.Content = parser.GetContent();
                result.Links = parser.GetLinks();
                result.Markup = parser.GetMarkup();
                return result;
            }
        }

        // Set only in the constructor, so readonly.
        private readonly CrawlConfig _config;
        private readonly string _outputPath;
    }
}
