﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using MiniCrawler.Logging;

namespace MiniCrawler
{
    class Webpage
    {
        /// <summary>
        /// Downloads the HTML of a web page using an explicitly configured
        /// <see cref="HttpWebRequest"/>: routes through the system proxy with the
        /// current user's default credentials and sends a browser-like User-Agent.
        /// </summary>
        /// <param name="strURL">Absolute URL of the page to fetch.</param>
        /// <returns>The full response body decoded as text.</returns>
        /// <exception cref="WebException">The request failed or the server returned an error status.</exception>
        /// <exception cref="UriFormatException"><paramref name="strURL"/> is not a valid URI.</exception>
        public static string GetWebPage(string strURL)
        {
            HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(strURL);

            // Route through the system-configured proxy, authenticating with the
            // credentials of the account the crawler runs under.
            IWebProxy proxy = WebRequest.GetSystemWebProxy();
            proxy.Credentials = CredentialCache.DefaultCredentials;
            webRequest.Proxy = proxy;

            webRequest.Method = "GET";
            // Browser-like UA string; some servers reject unknown crawlers.
            webRequest.UserAgent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:2.0.1) Gecko/20100101 Firefox/4.0.1";
            // -1 == Timeout.Infinite, kept for compatibility with existing callers.
            // NOTE(review): a dead server will hang this request forever — consider a finite timeout.
            webRequest.Timeout = -1;

            // using guarantees the response (and its connection) is released even when
            // reading throws; the original code leaked the HttpWebResponse on any failure.
            using (HttpWebResponse webResponse = (HttpWebResponse)webRequest.GetResponse())
            {
                Log.WriteLog("[RESPONSE LENGTH][" + webResponse.ContentLength + "]");

                using (StreamReader sr = new StreamReader(webResponse.GetResponseStream()))
                {
                    return sr.ReadToEnd();
                }
            }
        }

        /// <summary>
        /// Downloads the content of a URL via the generic <see cref="WebRequest"/> API.
        /// Unlike <see cref="GetWebPage"/>, this variant sets no proxy, User-Agent,
        /// or timeout and does not log the response length.
        /// </summary>
        /// <param name="url">Absolute URL of the resource to fetch.</param>
        /// <returns>The full response body decoded as text.</returns>
        /// <exception cref="WebException">The request failed or the server returned an error status.</exception>
        /// <exception cref="UriFormatException"><paramref name="url"/> is not a valid URI.</exception>
        public static string GetPage(string url)
        {
            WebRequest myWebRequest = WebRequest.Create(url);

            // Stacked usings dispose the response, stream, and reader deterministically.
            // The original called Close() only on the success path, so any exception
            // in ReadToEnd leaked the connection.
            using (WebResponse myWebResponse = myWebRequest.GetResponse())
            using (Stream streamResponse = myWebResponse.GetResponseStream())
            using (StreamReader sreader = new StreamReader(streamResponse))
            {
                return sreader.ReadToEnd();
            }
        }
    }
}
