﻿module Spider

open System
open System.Collections.Concurrent
open System.Collections.Generic
open System.Diagnostics
open System.IO
open System.IO.IsolatedStorage
open System.Net
open System.Threading
open System.Text.RegularExpressions
open System.Windows.Controls
open System.Reflection
open System.Windows.Markup
open System.Windows
open UrlProcessor
open Wpf

module private Helpers =

    /// Messages exchanged between the supervisor, the URL collector and the
    /// crawling agents.
    type Message =
        | Url of string option                  // A URL to crawl; None when nothing was dequeued.
        | Done                                  // Crawling is finished.
        | Stop                                  // Stop handing out new work.
        | Mailbox of MailboxProcessor<Message>  // An idle agent requesting work.

    // Gates the number of crawling agents.
    [<Literal>]
    let Gate = 5

    /// Canonicalizes a URL (example.com -> http://www.example.com).
    /// Returns "" when the input cannot be parsed as a URI at all.
    let canonicalize (url : string) =
        // Matches the registrable domain (e.g. "example.com", "example.co.uk")
        // when applied right-to-left to a host name.
        let domPat = "[^\.]+\.\w{2,3}(\.\w{2})?"
        // Parse the input as-is first; if that fails, retry with an
        // "http://" prefix (covers bare inputs like "example.com").
        let uri =
            match Uri.TryCreate(url, UriKind.Absolute) with
            | true, u -> Some u
            | _ ->
                match Uri.TryCreate("http://" + url, UriKind.Absolute) with
                | true, u -> Some u
                | _ -> None
        match uri with
        | Some x ->
            let host = x.Host
            let path = x.AbsolutePath
            let host' = Regex(domPat, RegexOptions.RightToLeft).Match(host).Value
            // Matches only when the host already carries a subdomain
            // ("www." or any other) in front of the registrable domain.
            let pattern = "(?i)^https?://((www\.)|([^\.]+\.))" + Regex.Escape(host') + "[^\"]*"
            if Regex(pattern).IsMatch(string x)
            then "http://" + host + path
            else "http://www." + host + path  // Bare domain: prepend "www.".
        | None -> ""

    /// Saves crawl data (HTML, Web headers, response time) for [url] under
    /// [project]/<hash of url>/ in isolated storage.
    let saveData project url html headers time =

        let write path (str : string) =
            use stream = new IsolatedStorageFileStream(path, FileMode.Create, store)
            use writer = new StreamWriter(stream)
            writer.Write str

        // The page's directory name is the hash of its URL.
        let url' = hash url |> string
        let dir = Path.Combine(project, url')
        // Fix: the original also called "store.CreateDirectory url'", which
        // created a stray hash-named directory at the storage root.
        store.CreateDirectory dir
        write (Path.Combine(dir, "html.txt")) html
        write (Path.Combine(dir, "headers.txt")) headers
        write (Path.Combine(dir, "time.txt")) time

    /// Takes a URL and returns Some(html, response headers, response time in
    /// ms as a string), or None when the request fails or the content is not
    /// HTML.  All network operations are bounded by 5 s timeouts.
    let fetchData (url : string) =
        try
            let sw = Stopwatch.StartNew()
            let req = WebRequest.Create url :?> HttpWebRequest
            req.UserAgent <- "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)"
            req.Timeout <- 5000
            req.ReadWriteTimeout <- 5000
            use resp = req.GetResponse()
            // Flatten the response headers into "key,value" lines.
            let headerLines =
                [|
                    for key in resp.Headers.Keys do
                        yield key + "," + resp.Headers.Get key
                |]
            let headers = String.Join("\n", headerLines)
            if Regex("(?i)html").IsMatch(resp.ContentType) then
                use stream = resp.GetResponseStream()
                stream.ReadTimeout <- 5000
                use reader = new StreamReader(stream)
                let html = reader.ReadToEnd()
                sw.Stop()
                Some(html, headers, string sw.ElapsedMilliseconds)
            else None
        with _ -> None

    /// Gets directories disallowed by robots.txt as a single alternation
    /// regex pattern "(p1|p2|...)", or None when there are no Disallow
    /// entries or robots.txt cannot be retrieved.
    let robotsDisallow host =
        try
            // Fix: 'use' disposes the WebClient (the original leaked it).
            use client = new WebClient()
            let txt = client.DownloadString("http://" + host + "/robots.txt")
            let disallowed =
                [
                    for line in txt.Split '\n' do
                        let m = Regex("(?i)disallow:(.*)").Match(line)
                        if m.Success then
                            yield m.Groups.[1].Value.Trim()
                ]
            if disallowed.IsEmpty then None
            else Some("(" + String.Join("|", disallowed) + ")")
        with _ -> None

open Helpers

/// Crawls the Web starting from [url], following internal (same-domain)
/// links only, at most [depth] path segments deeper than the start page and
/// at most [limit] pages in total.  Each fetched page (HTML, headers,
/// response time) is saved in isolated storage under [project], and the
/// list of crawled URLs is written to [project]/<hash>/urls.txt.  A WPF
/// progress window is shown while the crawl runs.
/// NOTE(review): 'store', 'navWind', 'box', 'page2' and 'showMsg' are not
/// defined in this file -- presumably from the Wpf module, and 'processUrl'
/// from UrlProcessor; confirm against those modules.
let crawl project url depth limit =
    // Progress window, loaded from the embedded "progress.xaml" resource.
    let prog = 
        let file = Assembly.GetExecutingAssembly().GetManifestResourceNames() |> Array.find (fun x -> x = "progress.xaml")
        use stream = Assembly.GetExecutingAssembly().GetManifestResourceStream(file)
        XamlReader.Load(stream) :?> Window
    prog.Owner <- navWind
    prog.Show()

    // Concurrent queue for saving collected urls.
    let q = ConcurrentQueue<string>()

    // Hash set for saving crawled URLs.
    let set = HashSet<string>()

    // Capture the GUI synchronization context.
    let context = SynchronizationContext.Current

    // Storage layout: everything for this crawl lives under
    // [project]/<hash of canonical start URL>/.
    let url' = canonicalize url
    let url'' = hash url' |> string
    let path = Path.Combine(project, url'', "urls.txt")
    let path' = Path.Combine(project, url'')
    let dir = store.CreateDirectory path'
    let urls = store.CreateFile path
    let writer = new StreamWriter(urls)
    // AutoFlush so crawled URLs reach the file as soon as they are written.
    writer.AutoFlush <- true
    let uri = new Uri(url')
    let uri' = string uri
    // Segment count of the start URL; used below to bound crawl depth.
    let segs = uri.Segments.Length
    let host = uri.Host
    // Disallowed-path pattern from the site's robots.txt (None if none).
    let robots = robotsDisallow host
    // Registrable domain (e.g. "example.com"), matched right-to-left.
    let domPat = "[^\.]+\.\w{2,3}(\.\w{2})?"
    let host' = Regex(domPat, RegexOptions.RightToLeft).Match(host).Value

    /// Extracts the internal links from an HTML page, normalized to
    /// absolute "http://www..." form, bounded by [depth] and filtered
    /// against the robots.txt disallow rules.
    let extractInternalLinks html =
 
        // pattern1: href attribute values, skipping anchors, mailto:,
        // location. and javascript: pseudo-links.
        let pattern1 = "(?i)href\\s*=\\s*(\"|\')/?((?!#.*|/\B|mailto:|location\.|javascript:)[^\"\'\#]+)(\"|\'|\#)"
        // pattern2: any absolute http(s) URL.
        let pattern2 = "(?i)^https?://[^\"]*"
        // pattern3: absolute URL on the bare domain (no subdomain prefix).
        let pattern3 = "(?i)https?://" + Regex.Escape(host') + "[^\"]*"
        // pattern4: absolute URL on this domain, any (or no) subdomain.
        let pattern4 = "(?i)^https?://((www\.)|([^\.]+\.))?" + Regex.Escape(host') + "[^\"]*"
 
        let absolute, relative =
            [
                for x in Regex(pattern1).Matches(html) do
                    yield x.Groups.[2].Value
            ]
            |> List.partition (fun x -> Regex(pattern2).IsMatch(x))
 
        // NOTE(review): relative links are resolved against the *start* URL,
        // not the page they were found on -- confirm this is intended.
        let relative' = relative |> List.map (fun x -> uri' + x)
        // Keep same-domain absolute links and strip any #fragment.
        let absolute' = absolute |> List.filter (fun x -> Regex(pattern4).IsMatch(x))
                                 |> List.map (fun x -> Regex("[^\#]+").Match(x).Value) 
        
        // Normalize bare-domain links to "http://www.", drop links deeper
        // than the allowed depth, and de-duplicate.
        let intLinks = absolute' @ relative' |> List.map (fun x -> let m = Regex(pattern3).IsMatch(x)
                                                                   match m with
                                                                   | true -> Regex("(?i)https?://").Replace(x, "http://www.")
                                                                   | false -> x)
                                             |> List.map (fun x -> new Uri(x))
                                             |> List.filter (fun x -> x.Segments.Length < (segs + depth + 1))
                                             |> List.map (fun x -> x.ToString())
                                             |> Seq.distinct
                                             |> Seq.toList
        // Drop links matching the robots.txt disallow pattern, if any.
        match robots with
        | Some pat ->
            intLinks |> List.filter (fun x -> Regex(pat, RegexOptions.Compiled).IsMatch(x) = false)
        | None ->
            intLinks

    /// Supervises crawling agents.
    let supervisor =
        MailboxProcessor.Start(fun x ->
            // [run] becomes false once Stop is received: no more URLs are
            // handed out and idle agents are told to stop.
            let rec loop (run : bool) =
                async {
                    let! msg = x.Receive()
                    match msg with
                    // An idle agent is asking for work.
                    | Mailbox(mailbox) -> 
                        let count = set.Count
                        // Keep crawling while under the page limit and running.
                        let ok = count < limit && run
                        match ok with
                        | true ->
                            let url = q.TryDequeue()
                            match url with
                            | true, str -> 
                                let b = set.Contains str
                                match b with
                                | false -> 
                                    // New URL: mark as crawled and hand it out.
                                    set.Add str |> ignore
                                    mailbox.Post <| Url(Some str)
                                    return! loop run
                                | true -> 
                                    // Already crawled: tell the agent to ask again.
                                    mailbox.Post <| Url None
                                    return! loop run

                            | _ ->
                                // Queue empty: tell the agent to ask again.
                                mailbox.Post <| Url None
                                return! loop run
                        | false ->
                            mailbox.Post Stop
                            return! loop run

                    | Stop -> return! loop false

                    // Done: finalize -- read back the crawled URLs, update
                    // the GUI on its captured context, then dispose.
                    | _ -> 
                        writer.Close()
                        urls.Close()
                        // NOTE(review): 'stream' and 'reader' are bound with
                        // 'let' rather than 'use', so they are never disposed
                        // and the isolated-storage file handle leaks here.
                        let stream = new IsolatedStorageFileStream(path, FileMode.OpenOrCreate, store)
                        let reader = new StreamReader(stream)
                        let lines =
                            [
                                while not reader.EndOfStream do
                                    yield reader.ReadLine()
                            ]
                        let crawlerFailed = lines.IsEmpty
                        // Switch to the GUI thread before touching controls.
                        do! Async.SwitchToContext context
                        match crawlerFailed with
                        | false ->
                            // One clickable list item per crawled URL.
                            let genItem url =
                                let item = ListBoxItem(Content = url, Height=25.)
                                item.PreviewMouseLeftButtonDown |> Event.add (fun _ ->
                                    let content = item.Content |> string
                                    processUrl project content context |> Async.Start)
                                item
                            let items = lines |> List.map (fun x -> genItem x)
                            box.ItemsSource <- items
                            prog.Close()
                            navWind.NavigationService.Navigate page2 |> ignore
                        | true -> 
                            prog.Close()
                            showMsg "The crawler failed to find any HTML pages, check your Internet connection or make sure the input URL was valid."                   
                        (x :> IDisposable).Dispose()
                }
            loop true)

    /// Receives urls from crawling agents and Controls the supervisor disposal.
    let urlCollector =
        MailboxProcessor.Start(fun y ->
            // [count] tallies the Done messages received from agents.
            let rec loop count =
                async {
                    // 7 s inactivity timeout: if nothing arrives in that
                    // window, the crawl is considered stalled.
                    let! msg = y.TryReceive(7000)
                    match msg with
                    | Some message ->
                        match message with
                        | Url u ->
                            match u with
                            | Some url -> q.Enqueue url
                                          return! loop count
                            | None -> return! loop count
                        | _ ->
                            match count with
                            // Gate is a [<Literal>], so this is a constant
                            // pattern: the last agent has reported Done.
                            | Gate -> supervisor.Post Done
                                      (y :> IDisposable).Dispose()
                            | _ -> return! loop (count + 1)
                    | None -> supervisor.Post Stop
                              return! loop count
                }
            loop 1)
    
    /// Initializes a crawling agent.
    let crawler id =
        MailboxProcessor.Start(fun inbox ->
            let rec loop() =
                async {
                    let! msg = inbox.Receive()
                    match msg with
                    | Url x ->
                        match x with
                        | Some url -> 
                            // Fetch the page; on success persist it and feed
                            // the discovered links back to the collector.
                            let data = fetchData url
                            match data with
                            | Some (html, headers, time) -> 
                                writer.WriteLine url
                                saveData project url html headers time
                                let links = extractInternalLinks html
                                for link in links do
                                    urlCollector.Post <| Url (Some link)
                                // Ask the supervisor for more work.
                                supervisor.Post <| Mailbox(inbox)
                                return! loop()
                            | None ->
                                supervisor.Post <| Mailbox(inbox)
                                return! loop()
                        | None ->
                            supervisor.Post <| Mailbox(inbox)
                            return! loop()
                    // Stop: report Done to the collector and shut down.
                    | _ ->
                        urlCollector.Post Done
                        (inbox :> IDisposable).Dispose()
                }
            loop())

    // Spawn the crawlers.
    let crawlers = 
        [
            for i in 1 .. Gate do
                yield crawler i
        ]
    
    // Post the first messages.
    urlCollector.Post <| Url (Some url')
    crawlers |> List.iter (fun ag -> ag.Post <| Url None)