﻿Imports HtmlAgilityPack
Imports System.Data.SQLite
Imports System.ComponentModel
Imports System.Collections.ObjectModel
Imports System.Text.RegularExpressions
Imports System.Text
Imports System.Diagnostics
Imports System.Threading.Tasks
Imports System.Threading

Namespace OfflineLibrary_WPF
	Module Pages

		'serializes every read/write of the offline SQLite database; taken in
		'GetPageOffline, GetLastUpdatedDate, and SavePage
		Private _dbSyncLock As New Object

		'URLs waiting to be fetched by the background download worker
		Public DownloadQueue As New Concurrent.ConcurrentQueue(Of Uri)
		'this is for managing which URLs we'll get for the 'download all' feature
		Public urlsToGet As New Concurrent.ConcurrentQueue(Of Uri)

		'log of saved pages/files; appended from worker threads via AddOnUI
		'(presumably a marshal-to-UI-thread extension — confirm its definition)
		Public PagesDownloaded As New ObservableCollection(Of String)

		'URL path fragments that mark non-content pages; consulted by TestValidURL
		Public ReadOnly IgnoreLinks As String() = {"/apps/", "/sydp/", "/pref/", "/sysl/", "/sybv/", "/sydt/", "/pag/"}

		''' <summary>
		''' Strips the fragment (#...) AND, despite the name, the query string (?...)
		''' from a URL, returning the bare page address used as the cache key.
		''' </summary>
		Public Function DropURLFragment(URL As Uri) As Uri
			Dim trimmed As String = URL.AbsoluteUri

			'cut at the first '#', then at the first '?', keeping what precedes each
			Dim hashPos As Integer = trimmed.IndexOf("#"c)
			If hashPos >= 0 Then trimmed = trimmed.Substring(0, hashPos)

			Dim queryPos As Integer = trimmed.IndexOf("?"c)
			If queryPos >= 0 Then trimmed = trimmed.Substring(0, queryPos)

			Return New Uri(trimmed)
		End Function

		''' <summary>
		''' Convenience overload that uses the ambient BrowseMode value
		''' (declared elsewhere in the project — TODO confirm its scope/ownership).
		''' </summary>
		Public Function GetPage(URL As Uri, LocalFileLocation As IO.DirectoryInfo, bgDownloader As BackgroundWorker) As HtmlDocument
			Return GetPage(URL, LocalFileLocation, bgDownloader, BrowseMode)
		End Function

		''' <summary>
		''' Central page fetch. Serves a page from the offline store and/or the web
		''' depending on BrowseMode, falls back to a placeholder when nothing is
		''' available, optionally queues the URL for background downloading, injects
		''' fragment-scrolling JavaScript and ruby-annotation CSS, and finally
		''' expands %LocalFilePath% placeholders via LocalizeLinks.
		''' </summary>
		Public Function GetPage(URL As Uri, LocalFileLocation As IO.DirectoryInfo, bgDownloader As BackgroundWorker, BrowseMode As BrowseModes) As HtmlDocument

			URL = DropURLFragment(URL)

			Dim htmlPage As New HtmlDocument

			'offline-first modes: try the cached copy before touching the network
			If BrowseMode = BrowseModes.Offline OrElse BrowseMode = BrowseModes.OfflineGetLinkedPages OrElse BrowseMode = BrowseModes.OfflineGetMissingPage Then

				htmlPage = GetPageOffline(URL, LocalFileLocation)

			End If

			'online modes always download; the offline "get missing/linked" modes
			'download only when the cache came back empty (no child nodes)
			If BrowseMode = BrowseModes.Online OrElse BrowseMode = BrowseModes.OnlineGetLinkedPages OrElse ((BrowseMode = BrowseModes.OfflineGetLinkedPages OrElse BrowseMode = BrowseModes.OfflineGetMissingPage) AndAlso htmlPage.DocumentNode.ChildNodes.Count = 0) Then
				htmlPage = DownloadPage(URL, LocalFileLocation)
				'this is necessary because, for some reason, the above htmlPage can't be modified by the following code to add some JavaScript and CSS
				htmlPage = GetPageOffline(URL, LocalFileLocation)
			End If

			'still nothing: show a placeholder with a live link to the original URL
			If htmlPage.DocumentNode.ChildNodes.Count = 0 Then
				htmlPage.LoadHtml(String.Format("<html><body>I don't have that page saved for offline reading.<br /><br /><a href='{0}'>{0}</a></body><html>", URL.AbsoluteUri))
			End If

			'linked-pages modes: queue this URL and wake the background worker
			If BrowseMode = BrowseModes.OfflineGetLinkedPages OrElse BrowseMode = BrowseModes.OnlineGetLinkedPages Then
				'AddToDownloadQueue(htmlPage)
				AddToDownloadQueue(URL)
				If Not bgDownloader.IsBusy Then
					bgDownloader.RunWorkerAsync()
				End If
			End If

			'add in JavaScript to enable scrolling to URL fragments and HTML IDs
			Dim body As HtmlNode = htmlPage.DocumentNode.SelectSingleNode("/html/body")
			If body IsNot Nothing Then
				'based loosely on http://stackoverflow.com/a/8923993/271351
				Dim sb As New StringBuilder
				sb.AppendLine("function documentOffsetTop(el) { return el.offsetTop + (el.offsetParent ? documentOffsetTop(el.offsetParent) : 0); }")
				sb.AppendLine("function showIT(elID) {")
				sb.AppendLine("	window.setTimeout(function () {")
				sb.AppendLine("		var el = document.getElementById(elID);")
				sb.AppendLine("		var top = documentOffsetTop(el) - (window.innerHeight / 2);")
				sb.AppendLine("		window.scrollTo(0, top);")
				sb.AppendLine("	});")
				sb.AppendLine("}")

				Dim fragmentNavScript As HtmlNode = htmlPage.CreateElement("script")
				fragmentNavScript.Attributes.Add("type", "text/javascript")
				fragmentNavScript.AppendChild(htmlPage.CreateTextNode(sb.ToString))
				body.AppendChild(fragmentNavScript)
			End If

			'add CSS so <ruby>/<rb>/<rt> pronunciation annotations lay out as a table
			Dim head As HtmlNode = htmlPage.DocumentNode.SelectSingleNode("/html/head")
			If head IsNot Nothing Then
				Dim sb As New StringBuilder
				sb.AppendLine("ruby { display: inline-table; vertical-align: top; } ruby * { display: inline; line-height:1.0; text-indent:0; text-align:center; white-space: nowrap; } rb { display: table-row-group; font-size: 100%; } rt { display: table-header-group; font-size: 100%; line-height: 1.1; }")

				Dim fragmentRubyCSS As HtmlNode = htmlPage.CreateElement("style")
				fragmentRubyCSS.Attributes.Add("type", "text/css")
				fragmentRubyCSS.AppendChild(htmlPage.CreateTextNode(sb.ToString))
				head.AppendChild(fragmentRubyCSS)
			End If

			'expand %LocalFilePath% placeholders into file:/// URLs before returning
			Return LocalizeLinks(htmlPage, LocalFileLocation)

		End Function

		''' <summary>
		''' Loads a page's HTML from the SQLite offline store. Returns an empty
		''' HtmlDocument (DocumentNode with no children) when the URL is not cached.
		''' LocalFileLocation is unused here; kept for signature symmetry with the
		''' other page getters.
		''' </summary>
		Public Function GetPageOffline(URL As Uri, LocalFileLocation As IO.DirectoryInfo) As HtmlDocument

			URL = DropURLFragment(URL)

			Dim htmlPage As New HtmlDocument
			SyncLock _dbSyncLock
				Using conn As New SQLiteConnection(OfflineDataConnectionString)
					conn.Open()
					Dim sql As String = "SELECT HTML FROM PageData WHERE URL=@URL"
					Using cmd As New SQLiteCommand(sql, conn)

						cmd.Parameters.AddWithValue("@URL", URL.AbsoluteUri)

						'ExecuteScalar returns Nothing when the URL has no row;
						'stored HTML is compressed (see SavePage), so decompress here
						Dim pageBytes As Byte() = cmd.ExecuteScalar
						Dim pageHTML As String = If(pageBytes IsNot Nothing, DecompressByteToString(pageBytes), "")
						htmlPage.LoadHtml(If(pageHTML, ""))
					End Using
				End Using
			End SyncLock

			Return htmlPage
		End Function

		''' <summary>
		''' Returns the cached offline copy of a page, downloading (and thereby
		''' saving) it only when no cached copy exists.
		''' </summary>
		Public Function GetPageByForce(URL As Uri, LocalFileLocation As IO.DirectoryInfo) As HtmlDocument

			URL = DropURLFragment(URL)

			'cache first: an empty document means the page was not in the store
			Dim cachedPage As HtmlDocument = GetPageOffline(URL, LocalFileLocation)
			If cachedPage.DocumentNode.ChildNodes.Count > 0 Then
				Return cachedPage
			End If

			Return DownloadPage(URL, LocalFileLocation)
		End Function

		'this is for progress bar purposes only
		'NOTE(review): incremented via Interlocked in GetLinkedPages; nothing in this
		'file resets them — confirm the UI layer zeroes them between runs
		Public OutstandingLinkedPages As Integer
		Public RetrievedLinkedPages As Integer
		''' <summary>
		''' Fetches the given page (via GetPageByForce) and then every page it links
		''' to, one level deep, forcing each into the offline store. Progress counters
		''' are updated with Interlocked because this runs on worker threads.
		''' </summary>
		Public Sub GetLinkedPages(URL As Uri, LocalFileLocation As IO.DirectoryInfo)
			Dim htmlPage As HtmlDocument = GetPageByForce(URL, LocalFileLocation)
			Dim aNodes As HtmlNodeCollection = htmlPage.DocumentNode.SelectNodes("//a")
			Dim baseURI As New Uri("http://wol.jw.org/")

			If aNodes IsNot Nothing Then
				'for progress bar only
				Interlocked.Add(Pages.OutstandingLinkedPages, aNodes.Count)

				For Each aNode In aNodes
					'anchors without an href (e.g. named anchors) previously threw a
					'NullReferenceException here; skip them but still count them so
					'the Outstanding/Retrieved totals stay in step
					Dim hrefAttribute As HtmlAttribute = aNode.Attributes("href")
					If hrefAttribute IsNot Nothing Then
						Dim myURL As New Uri(baseURI, hrefAttribute.Value)
						'side effect only: forces the page into the offline store
						Call GetPageByForce(myURL, LocalFileLocation)
					End If

					'for progress bar purposes only
					Interlocked.Increment(Pages.RetrievedLinkedPages)
				Next
			End If

		End Sub

		''' <summary>
		''' Recursively walks the site's navigation index pages (paths containing
		''' "/wol/lv/"), collecting every linked URL into urlsToGet. At recursion
		''' depth 0 the collected URLs are drained into DownloadQueue and the
		''' background worker is started. Used by the "download all" feature.
		''' </summary>
		Public Sub GetNestedLinkedPages(htmlPage As HtmlDocument, LocalFileLocation As IO.DirectoryInfo, bgDownloader As BackgroundWorker, Depth As Integer)
			Dim aNodes As HtmlNodeCollection = htmlPage.DocumentNode.SelectNodes("//a")
			Dim baseURI As New Uri("http://wol.jw.org/")

			If aNodes IsNot Nothing Then
				'NOTE(review): Contains on a ConcurrentQueue is O(n), and the
				'Contains/Enqueue pair below is not atomic — two parallel iterations
				'can both pass the check and recurse into the same URL; confirm
				'duplicate downloads are tolerated downstream
				Parallel.ForEach(aNodes, Sub(aNode)
											 'NOTE(review): anchors without an href throw here —
											 'confirm index pages always carry href attributes
											 Dim myURL As New Uri(baseURI, aNode.Attributes("href").Value)
											 myURL = DropURLFragment(myURL)

											 'the /wol/lv/ part signifies that we are in the navigation level
											 'downloading everything from this level will prevent crazy amounts of downloading, but still get everything for the language
											 If myURL.AbsolutePath.Contains("/wol/lv/") Then
												 If Not urlsToGet.Contains(myURL) Then
													 'we want to enqueue the page and all it's children for download
													 'we want to do this for every page
													 urlsToGet.Enqueue(myURL)

													 Dim myPage As HtmlDocument = GetPageByForce(myURL, LocalFileLocation)
													 GetNestedLinkedPages(myPage, LocalFileLocation, bgDownloader, Depth + 1)
												 End If
											 Else
												 urlsToGet.Enqueue(myURL)
											 End If

										 End Sub)
			End If

			'now that we have a list of all the index URLs, we can queue them up to get one level below them
			If Depth = 0 Then
				'only the outermost call drains the queue, after all recursion is done
				Dim urlToGet As Uri = Nothing
				Do While urlsToGet.TryDequeue(urlToGet)
					AddToDownloadQueue(urlToGet)
				Loop

				If Not bgDownloader.IsBusy Then
					bgDownloader.RunWorkerAsync()
				End If
			End If

		End Sub

		'this would actually download 1 level too deep
		'Public Sub AddToDownloadQueue(htmlPage As HtmlDocument)
		'	Dim aNodes As HtmlNodeCollection = htmlPage.DocumentNode.SelectNodes("//a")
		'	Dim baseURI As New Uri("http://wol.jw.org/")

		'	If aNodes IsNot Nothing Then
		'		For Each aNode In aNodes
		'			Dim myURL As New Uri(baseURI, aNode.Attributes("href").Value)

		'			DownloadQueue.Enqueue(myURL)
		'		Next
		'	End If
		'End Sub

		''' <summary>Queues one URL for the background download worker.</summary>
		Public Sub AddToDownloadQueue(URL As Uri)
			DownloadQueue.Enqueue(URL)
		End Sub

		''' <summary>
		''' Downloads a page from the web and, when anything was retrieved, saves it
		''' to the offline store via SavePage. Returns an empty HtmlDocument when the
		''' URL is excluded by TestValidURL or the download fails.
		''' </summary>
		Public Function DownloadPage(URL As Uri, LocalFileLocation As IO.DirectoryInfo) As HtmlDocument
			'make sure that we drop anything after the #
			URL = DropURLFragment(URL)

			Dim htmlPage As New HtmlDocument

			'exclude non-content pages
			If TestValidURL(URL) Then
				'update the page if it is more than a month old
				'this should be user configurable somewhere
				'If GetLastUpdatedDate(URL).AddMonths(1) < DateTime.Now OrElse ForceUpdate Then

				Dim pageGetter As New HtmlWeb
				Try
					htmlPage = pageGetter.Load(URL.AbsoluteUri)
				Catch ex As Exception
					'deliberately best-effort: on any download failure the caller
					'falls back to the "page not saved" placeholder (see GetPage)
				End Try

				'only persist non-empty results
				If htmlPage.DocumentNode.ChildNodes.Count > 0 Then
					SavePage(URL, htmlPage, LocalFileLocation)
				End If

				'End If
			End If

			Return htmlPage
		End Function

		''' <summary>
		''' Returns False when the URL contains any of the non-content path markers
		''' in IgnoreLinks; True otherwise.
		''' </summary>
		Public Function TestValidURL(URL As Uri) As Boolean
			For Each ignoreString In IgnoreLinks
				'short-circuit: one match is enough to reject the URL (the original
				'kept AND-ing across the whole array after the result was decided)
				If URL.AbsoluteUri.Contains(ignoreString) Then
					Return False
				End If
			Next

			Return True
		End Function

		''' <summary>
		''' Returns the stored LastUpdated timestamp for a URL, or Date.MinValue when
		''' the URL is not in the offline store (or the stored value cannot be parsed).
		''' </summary>
		Public Function GetLastUpdatedDate(URL As Uri) As Date
			Dim lastUpdated As Date

			SyncLock _dbSyncLock
				Using conn As New SQLiteConnection(OfflineDataConnectionString)
					conn.Open()
					Dim sql As String = "SELECT LastUpdated FROM PageData WHERE URL=@URL"
					Using cmd As New SQLiteCommand(sql, conn)
						cmd.Parameters.AddWithValue("@URL", URL.AbsoluteUri)

						'ExecuteScalar yields Nothing for a missing row; TryParse then
						'returns False and we fall through to Date.MinValue.
						'NOTE(review): SavePage writes UTC but this parses without a
						'culture/DateTimeKind — confirm comparison sites expect that
						Dim foundDate As Boolean = Date.TryParse(cmd.ExecuteScalar(), lastUpdated)

						If foundDate Then
							Return lastUpdated
						Else
							Return Date.MinValue
						End If

					End Using
				End Using
			End SyncLock
		End Function

		'Public Function DownloadFileAsString(URL As String) As String
		'	Using client As New System.Net.WebClient
		'		client.Encoding = System.Text.Encoding.UTF8
		'		Dim Page As String = client.DownloadString(URL)
		'		Return Page
		'	End Using
		'End Function

		''' <summary>
		''' Downloads a file and returns its bytes; returns a zero-length array when
		''' the download fails for any reason (best-effort, never throws for
		''' WebException failures).
		''' </summary>
		Public Function DownloadFile(URL As Uri) As Byte()
			Using client As New System.Net.WebClient
				Try
					Return client.DownloadData(URL)
				Catch ex As System.Net.WebException When ex.Message = "The request was aborted: The connection was closed unexpectedly."
					'this is a 404 error in most cases
					'the server closes the connection and redirects
					'NOTE(review): matching on the exception message is locale-fragile —
					'consider keying on ex.Status instead
				Catch ex As System.Net.WebException
					'there was an unknown error downloading the file
				End Try
				'the original returned {0} (a single zero byte) on failure despite the
				'comment promising a 0-length file; an empty array matches the intent
				'and writes an empty file at the call sites
				Return New Byte() {}
			End Using
		End Function

		''' <summary>
		''' Patches specific downloaded JavaScript files, selected by file name, so
		''' they run from the local file system instead of the live site.
		''' </summary>
		Public Sub FixJS(jsFileInfo As IO.FileInfo)
			Select Case jsFileInfo.Name
				Case "wol.desktop.min.js"
					Dim jsFile As String = IO.File.ReadAllText(jsFileInfo.FullName)
					'wrap the entire minified script in try/catch so any runtime
					'failure in it is silently ignored when browsing offline
					jsFile = String.Format("{0}{1}{2}", "try{", jsFile, "}catch(e){}")
					'jsFile = jsFile.Replace("JSON.parse(t.localStorage.getItem(""mru""))", "[]")
					'jsFile = jsFile.Replace("JSON.parse(t.localStorage.getItem(""lscache-libInfo""))", "[]")
					'jsFile = jsFile.Replace("if(i.filter(this.href))return f&&r.html(""""),u(this.href),t.history.pushState(null,null,this.href),!1", "")
					'jsFile = jsFile.Replace("function(e,n,r){var i,s=0,o=e.length", "function(e,n,r){var i,s=0,o=0")
					'var t = n('<div style="width:1in;visible:hidden;padding:0px"></div>').appendTo("body"),
					't.contentAreaMargins = {
					'		marginTop: i.outerHeight(),
					'		marginBottom: 0
					'	}

					IO.File.WriteAllText(jsFileInfo.FullName, jsFile)
				Case "home.js"
					Dim jsFile As String = IO.File.ReadAllText(jsFileInfo.FullName)
					'disable the date-based data fetch (presumably the daily-text
					'feature — confirm), which needs the live site to work
					jsFile = jsFile.Replace("var url = app.makeDateDataURL(new Date(), 0, true);", "")
					jsFile = jsFile.Replace("if (url) {", "if (false) {")
					'jsFile = jsFile.Replace(" if (url) {", "try{ if (url) {")
					'jsFile = jsFile.Replace("$('#searchField').focus();", "}catch(e){}$('#searchField').focus();")

					IO.File.WriteAllText(jsFileInfo.FullName, jsFile)

			End Select

		End Sub

		''' <summary>
		''' Post-processes a downloaded .css file: downloads every relative url(...)
		''' asset into LocalFileLocation (if not already present) and rewrites the
		''' url(...) references to absolute file:/// paths so the stylesheet works
		''' offline.
		''' </summary>
		Public Sub FixCSS(cssFileInfo As IO.FileInfo, URL As Uri, LocalFileLocation As IO.DirectoryInfo)
			Select Case cssFileInfo.Extension
				Case ".css"
					Dim URLsToReplace As New List(Of KeyValuePair(Of String, String))

					Dim cssFile As String = IO.File.ReadAllText(cssFileInfo.FullName)

					'[^:] deliberately skips absolute URLs (anything containing a scheme)
					Dim urlRegex As New Regex("(?<=url\()[^:]*?(?=\))", RegexOptions.Multiline)
					Dim regexReplace As String = "URL$&"

					For Each cssURLMatch As Match In urlRegex.Matches(cssFile)
						'NOTE(review): Trim("'") strips single quotes only — confirm no
						'double-quoted url("...") values occur in these stylesheets
						Dim cssURL As String = cssURLMatch.Value.Trim("'")
						Dim fullFileName As String() = cssURL.Split("?")
						'take the part before the ?
						fullFileName = fullFileName(0).Split("/")
						'take only the file name, not the path, from the src
						Dim fileName As String = fullFileName(fullFileName.Length - 1)


						If fileName <> "" Then
							'download each asset at most once, keyed by bare file name
							Dim file As New IO.FileInfo(IO.Path.Combine(LocalFileLocation.FullName, fileName))
							If Not file.Exists Then
								Dim scriptSource As New Uri(URL, cssURL)
								Dim fileBytes As Byte() = DownloadFile(scriptSource)
								Using newFile As New IO.FileStream(file.FullName, IO.FileMode.Create)
									newFile.Write(fileBytes, 0, fileBytes.Length)
									PagesDownloaded.AddOnUI(String.Format("Saved: {0} => {1}", scriptSource.AbsoluteUri, file.FullName))
								End Using
							End If

							URLsToReplace.Add(New KeyValuePair(Of String, String)(cssURL, fileName))

						End If

					Next

					'second pass: swap each original url(...) value for its local
					'file:/// equivalent, normalizing separators to forward slashes
					For Each URLToReplace In URLsToReplace
						cssFile = cssFile.Replace(URLToReplace.Key, String.Format("'{0}{1}{2}{3}'", "file:///", LocalFileLocation, IO.Path.DirectorySeparatorChar, URLToReplace.Value).Replace(IO.Path.DirectorySeparatorChar, "/"))
					Next

					IO.File.WriteAllText(cssFileInfo.FullName, cssFile)

			End Select
		End Sub

		''' <summary>
		''' Persists a downloaded page to the offline store, writing both the
		''' compressed HTML (PageData) and a searchable text row (PageSearchData)
		''' inside one transaction.
		''' </summary>
		Public Sub SavePage(URL As Uri, htmlPage As HtmlDocument, LocalFileLocation As IO.DirectoryInfo)

			'we are saving twice
			'1 - the entire page compressed
			'2 - in a different table with full-text search enabled for later searching

			'prepare the page OUTSIDE the database lock: PreparePageForSave can hit
			'the network (downloading scripts/images via ParameterizeLinks), and
			'holding _dbSyncLock through that would stall every other DB user
			Dim pageToSave As String = PreparePageForSave(URL, htmlPage, LocalFileLocation)

			Dim pageBytesToSave As Byte() = CompressStringToByte(pageToSave)
			Dim pageSearchTextToSave As String = GetSearchTermsFromPage(htmlPage)
			Dim pageTitleToSave As String = GetTitleFromPage(htmlPage)

			SyncLock _dbSyncLock
				Using conn As New SQLiteConnection(OfflineDataConnectionString)
					conn.Open()
					'one transaction so the two tables never get out of sync
					Using trans As SQLiteTransaction = conn.BeginTransaction

						Using cmd As New SQLiteCommand(conn)

							cmd.CommandText = "INSERT OR REPLACE INTO PageData (URL, HTML, LastUpdated) VALUES (@URL, @HTML, @LastUpdated)"
							cmd.Parameters.AddWithValue("@URL", URL.AbsoluteUri)
							cmd.Parameters.AddWithValue("@HTML", pageBytesToSave)
							cmd.Parameters.AddWithValue("@LastUpdated", DateTime.Now.ToUniversalTime)

							cmd.ExecuteNonQuery()

						End Using

						Using cmd As New SQLiteCommand(conn)
							cmd.CommandText = "INSERT OR REPLACE INTO PageSearchData (URL, PageTitle, PageText, LastUpdated) VALUES (@URL, @PageTitle, @PageText, @LastUpdated)"
							cmd.Parameters.AddWithValue("@URL", URL.AbsoluteUri)
							cmd.Parameters.AddWithValue("@PageTitle", pageTitleToSave)
							cmd.Parameters.AddWithValue("@PageText", pageSearchTextToSave)
							cmd.Parameters.AddWithValue("@LastUpdated", DateTime.Now.ToUniversalTime)

							cmd.ExecuteNonQuery()

						End Using

						trans.Commit()

					End Using
				End Using
			End SyncLock

			PagesDownloaded.AddOnUI(String.Format("Saved: {0} => {1}", URL.AbsoluteUri, OfflineDatabaseFileInfo.FullName))
		End Sub

		''' <summary>
		''' Rewrites a freshly-downloaded page for offline storage: script, image and
		''' stylesheet references are downloaded and replaced with %LocalFilePath%
		''' placeholders (expanded later by LocalizeLinks), and over-specific article
		''' links are generalized. Returns the resulting HTML as a string.
		''' </summary>
		Public Function PreparePageForSave(URL As Uri, htmlPage As HtmlDocument, LocalFileLocation As IO.DirectoryInfo) As String
			'find and fix all the 'script' URLs to point to the local file system
			htmlPage = ParameterizeLinks(URL, htmlPage, LocalFileLocation, "//script", "src")

			'find and fix all the 'img' URLs to point to the local file system
			htmlPage = ParameterizeLinks(URL, htmlPage, LocalFileLocation, "//img", "src")
			'these href tags actually point to images
			htmlPage = ParameterizeLinks(URL, htmlPage, LocalFileLocation, "//div[@class='image']//a", "href")

			''find and fix all the jw.org data-src values
			'htmlPage = ParameterizeLinks(URL, htmlPage, LocalFileLocation, "//div", "data-src")

			'find and fix all the 'link' URLs to point to the local file system
			'(rel='alternate' links are skipped — presumably alternate-language or
			'feed links rather than assets; confirm)
			htmlPage = ParameterizeLinks(URL, htmlPage, LocalFileLocation, "//link[not(@rel) or @rel!='alternate']", "href")

			'fix links that are too specific
			htmlPage = GeneralizeLinks(htmlPage)

			Return htmlPage.DocumentNode.OuterHtml
		End Function

		''' <summary>
		''' For every node matching XPathNodesToFix, downloads the resource named by
		''' the given attribute into LocalFileLocation (if not already present),
		''' applies the JS/CSS fix-ups, and rewrites the attribute to a
		''' %LocalFilePath% placeholder that LocalizeLinks later expands.
		''' </summary>
		Public Function ParameterizeLinks(URL As Uri, htmlPage As HtmlDocument, LocalFileLocation As IO.DirectoryInfo, XPathNodesToFix As String, Attribute As String) As HtmlDocument
			Dim pageNodes As HtmlNodeCollection = htmlPage.DocumentNode.SelectNodes(XPathNodesToFix)

			'*****************************
			'the script names could include a ?, which is an invalid file name in Windows.
			'Have to find a way to fix that without causing collisions.
			'It appears that the part after the ? is not needed to get the file accurately.
			'The following code will work even if there is no ? in the file name
			'*****************************
			If pageNodes IsNot Nothing Then
				For Each pageNode In pageNodes
					Dim workingAttribute As HtmlAttribute = pageNode.Attributes(Attribute)

					If workingAttribute IsNot Nothing Then
						'fix for jw.org assets
						'If workingAttribute.Value.StartsWith("assets/") OrElse workingAttribute.Value.StartsWith("themes/") Then
						'	workingAttribute.Value = String.Format("/{0}", workingAttribute.Value)
						'End If

						Dim fullFileName As String() = workingAttribute.Value.Split("?")
						'take the part before the ?
						fullFileName = fullFileName(0).Split("/")
						'take only the file name, not the path, from the src
						Dim fileName As String = fullFileName(fullFileName.Length - 1)

						'make sure this file is downloaded to that location
						'NOTE(review): different paths sharing a file name collide on the
						'first-downloaded copy — see the banner comment above
						If fileName <> "" Then
							Dim file As New IO.FileInfo(IO.Path.Combine(LocalFileLocation.FullName, fileName))
							If Not file.Exists Then
								Dim scriptSource As New Uri(URL, workingAttribute.Value)
								Dim fileBytes As Byte() = DownloadFile(scriptSource)
								Using newFile As New IO.FileStream(file.FullName, IO.FileMode.Create)
									newFile.Write(fileBytes, 0, fileBytes.Length)
									PagesDownloaded.AddOnUI(String.Format("Saved: {0} => {1}", scriptSource.AbsoluteUri, file.FullName))
								End Using

								'fix broken JavaScript
								FixJS(file)

								'fix CSS
								FixCSS(file, URL, LocalFileLocation)
							End If

							'finally, parameterize the URL
							pageNode.Attributes(Attribute).Value = String.Format("{0}{1}{2}", "%LocalFilePath%", IO.Path.DirectorySeparatorChar, fileName)
						End If
					End If
				Next
			End If

			Return htmlPage
		End Function

		''' <summary>
		''' Rewrites overly-specific article links (those ending in an extra numeric
		''' segment) so cached pages link to the general article URL instead.
		''' </summary>
		Public Function GeneralizeLinks(htmlPage As HtmlDocument) As HtmlDocument

			'chop off the last number in the following links in every page
			Dim specificLink As New Regex("[A-Za-z]/[0-9]{5,}?/[0-9]*?/[0-9]*")

			Dim anchorNodes As HtmlNodeCollection = htmlPage.DocumentNode.SelectNodes("//a")
			Dim rootUri As New Uri("http://wol.jw.org/")

			If anchorNodes Is Nothing Then
				Return htmlPage
			End If

			For Each anchorNode In anchorNodes
				Dim hrefAttr As HtmlAttribute = anchorNode.Attributes("href")
				If hrefAttr Is Nothing Then Continue For

				Dim href As String = hrefAttr.Value
				'only touch content links that match the over-specific pattern
				If TestValidURL(New Uri(rootUri, href)) AndAlso specificLink.IsMatch(href) Then
					'drop everything after the final slash
					hrefAttr.Value = href.Substring(0, href.LastIndexOf("/"))
				End If
			Next

			Return htmlPage
		End Function

		''' <summary>
		''' Expands the %LocalFilePath% placeholders (inserted by ParameterizeLinks)
		''' into file:/// URLs pointing at the local asset folder. Returns an empty
		''' document when given an empty document.
		''' </summary>
		Public Function LocalizeLinks(htmlPage As HtmlDocument, LocalFileLocation As IO.DirectoryInfo) As HtmlDocument
			Dim returnPage As New HtmlDocument
			If htmlPage.DocumentNode.ChildNodes.Count > 0 Then
				'the separator replacement runs over the WHOLE document on purpose:
				'ParameterizeLinks stores "%LocalFilePath%" followed by the platform
				'separator, so each expanded path still contains one that must become
				'"/" — note any other backslashes in the page get rewritten too
				returnPage.LoadHtml(htmlPage.DocumentNode.OuterHtml.Replace("%LocalFilePath%", String.Format("file:///{0}", LocalFileLocation.FullName)).Replace(IO.Path.DirectorySeparatorChar, "/"))
			End If
			Return returnPage
		End Function






	End Module
End Namespace