﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Crawler.Model;
using Crawler.DataAccess;
using Husb.Framework;
using System.IO;
using System.Collections.Specialized;
using ICSharpCode.SharpZipLib.Zip;
using System.Text.RegularExpressions;
using System.Net;
using Ionic.Zlib;

namespace Crawler.BusinessActions
{
    public class WebPage : BusinessManagerBase<WebPageInfo, WebPageDao>
    {
        // Minimal XHTML 1.0 Transitional wrapper pieces: GetHtmlContent() sandwiches a
        // captured HTML fragment between these two constants to form a standalone,
        // UTF-8 encoded document.
        public const string htmlHeader = @"<!DOCTYPE html PUBLIC ""-//W3C//DTD XHTML 1.0 Transitional//EN"" ""http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"">
<html xmlns=""http://www.w3.org/1999/xhtml"">
 <head><title> new document </title><meta http-equiv=""Content-Type"" content=""text/html; charset=utf-8""/> </head><body>";
        public const string htmlFooter = "</body></html>";

        /// <summary>
        /// Returns how many stored pages already use the given URL
        /// (used throughout this class for de-duplication).
        /// </summary>
        public static int GetCountByUrl(String url)
        {
            return new WebPageDao().GetCountByUrl(url);
        }

        /// <summary>
        /// Asks the DAO to materialize the stored article content for <paramref name="id"/>
        /// into <paramref name="path"/>; returns the DAO's success flag.
        /// </summary>
        public static bool GetArticleContent(Guid id, string path)
        {
            return new WebPageDao().GetArticleContent(id, path);
        }


        /// <summary>
        /// Recursively collects the full path of every file under <paramref name="dir"/>
        /// into <paramref name="sc"/>: files directly in the directory first, then the
        /// contents of each immediate subdirectory, depth-first.
        /// </summary>
        public static void GetFiles(string dir, StringCollection sc)
        {
            // Files at this level come before anything in subdirectories.
            sc.AddRange(Directory.GetFiles(dir));

            // Descend into each immediate subdirectory.
            foreach (string subDir in Directory.GetDirectories(dir))
            {
                GetFiles(subDir, sc);
            }
        }

        /// <summary>
        /// Wraps an HTML fragment in the standard XHTML header/footer constants so it
        /// forms a complete standalone document.
        /// </summary>
        public string GetHtmlContent(string html)
        {
            return String.Concat(htmlHeader, html, htmlFooter);
        }

        /// <summary>
        /// Zips every file under <paramref name="path"/> (skipping *.bak files) and
        /// returns the archive bytes, or null if anything fails. Entry names are the
        /// file paths relative to <paramref name="path"/>.
        /// </summary>
        /// <param name="id">Identifier of the owning article; currently unused, kept for interface compatibility.</param>
        /// <param name="path">Root directory whose contents are archived.</param>
        public byte[] SaveContent(int id, string path)
        {
            try
            {
                StringCollection sc = new StringCollection();
                GetFiles(path, sc);

                using (MemoryStream ms = new MemoryStream())
                using (ZipOutputStream s = new ZipOutputStream(ms))
                {
                    s.SetLevel(9); // 0 = store only ... 9 = best compression

                    byte[] buffer = new byte[4096];
                    foreach (string file in sc)
                    {
                        if (file.EndsWith(".bak")) continue;

                        // Store paths relative to the root so the archive is portable.
                        ZipEntry entry = new ZipEntry(file.Remove(0, path.Length));
                        entry.DateTime = DateTime.Now;
                        s.PutNextEntry(entry);

                        using (FileStream fs = File.OpenRead(file))
                        {
                            int read;
                            while ((read = fs.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                s.Write(buffer, 0, read);
                            }
                        }
                    }

                    // Finish writes the zip trailer; without it the archive is invalid.
                    s.Finish();

                    // Snapshot the archive bytes before the zip stream is disposed
                    // (disposing also closes the underlying MemoryStream). The original
                    // sized the array from s.Length and issued a single unchecked
                    // ms.Read; ToArray() is both correct and complete.
                    return ms.ToArray();
                }
            }
            catch (Exception)
            {
                // Best-effort by design: callers treat a null result as "no archive".
                // NOTE(review): consider logging the exception instead of discarding it.
                return null;
            }
        }


        /// <summary>
        /// Reads the full response body as text and closes the response stream.
        /// NOTE(review): the body is decoded with StreamReader's default encoding
        /// (UTF-8 with BOM detection); the Content-Type charset header is not honored.
        /// </summary>
        private string GetResponse(WebResponse resp)
        {
            // The original also copied resp.Headers into a dictionary that was never
            // used (its consuming assignment was commented out) and funneled the text
            // through an extra StringReader; both were dead work and are removed.
            using (var strm = resp.GetResponseStream())
            using (var strmRdr = new StreamReader(strm))
            {
                return strmRdr.ReadToEnd();
            }
        }

        /// <summary>
        /// Downloads a page and extracts the article body between the
        /// &lt;div class="post"&gt; marker and the post_next_prev div (cnblogs layout).
        /// Returns "" if the request failed, or null if the page downloaded but the
        /// pattern did not match (GetFirstMatch returns null on no match).
        /// </summary>
        private string GetPageContent(string url)
        {
            string content = "";

            HttpWebRequest request = WebRequest.Create(url) as HttpWebRequest;
            WebResponse response = null;
            try
            {
                response = request.GetResponse();
                string html = GetResponse(response);

                // Lookbehind/lookahead pair: everything between the opening post div
                // and the next/prev navigation div.
                string contentPattern = @"(?<=<div class[\s]*=[\s]*""post""[\s]*>)[\s\S]*?(?=<div id=""post_next_prev"">)";

                // The original ran this pattern twice (a manual Matches loop whose
                // result was immediately overwritten by GetFirstMatch); one pass
                // suffices and is behaviorally identical.
                content = GetFirstMatch(contentPattern, html);
            }
            catch
            {
                // Best-effort: network failures fall through and return "".
            }
            finally
            {
                if (response != null)
                {
                    response.Close();
                }
            }
            return content;
        }

        /// <summary>
        /// Expands a capture task into the article pages it refers to, inserting each
        /// newly discovered page through WebPageDao and returning them all.
        /// Behavior by task shape:
        ///  - IsNavigation false/null: capture the task URL itself as a single page.
        ///  - PageCategory == 1: treat the URL as an RSS feed; every &lt;link&gt; element
        ///    is an article URL.
        ///  - otherwise: the Category == 0 capture rule brackets a navigation region
        ///    whose anchors are followed, paging via TaskItem.NextPage for at most
        ///    100 pages.
        /// URLs already present in the store are skipped.
        /// </summary>
        public List<WebPageInfo> GetNavigationPageHtml(TaskItemInfo taskItem)
        {
            List<WebPageInfo> pages = new List<WebPageInfo>();
            // Load the task's capture rules up front; every branch passes them to CreateWebPage.
            taskItem.CaptureRules = new BindingListView<CaptureRuleInfo>( CaptureRule.GetByMasterId(taskItem.Id));

            if (taskItem.IsNavigation == null || !taskItem.IsNavigation.Value)
            {
                // Non-navigation task: the URL itself is the (only) article.
                WebPageInfo page = InsertSinglePage(taskItem.Url, taskItem.CaptureRules);
                pages.Add(page);
                return pages;
            }

            if (taskItem.PageCategory == 1)
            {
                // RSS mode: every <link>...</link> element is an article URL.
                string rss = Utilities.GetPageHTML(taskItem.Url);
                MatchCollection rssMatches = GetMatches(@"<link>(?<url>.+?)</link>", rss);
                foreach (Match rssMatch in rssMatches)
                {
                    string href = null;
                    // Groups[0] is the whole match; Groups[1] is the (?<url>...) capture.
                    if (rssMatch.Groups.Count > 0)
                    {
                        href = rssMatch.Groups[1].Value;
                    }

                    if (href == null)
                        continue;

                    // De-duplicate against previously captured pages.
                    if (GetCountByUrl(href) > 0)
                    {
                        continue;
                    }
                    //string sss = System.Web.HttpUtility.HtmlDecode(achor);

                    WebPageInfo webPage = CreateWebPage(href, null, taskItem.CaptureRules);
                    WebPageDao dao = new WebPageDao();
                    dao.Insert(webPage, false, null);

                    pages.Add(webPage);
                }
            }
            else
            {
                // Navigation mode: the Category == 0 rule delimits the link-list region.
                // NOTE(review): ruleNavigation is used below without a null check; a task
                // with no Category == 0 rule throws a NullReferenceException here.
                CaptureRuleInfo ruleNavigation = taskItem.CaptureRules.FirstOrDefault(r => r.Category == 0);

                // Everything between StartString and EndString (lookbehind/lookahead pair).
                string pattern = @"(?<=" + ruleNavigation.StartString + @")[\s\S]*?(?=" + ruleNavigation.EndString + ")"; // <div class=""post_item_body"">    </h3>
                // Full anchor regex: first capture is the href, second the anchor text.
                string achorHref = @"<a[^>]+href=\s*(?:'(?<href>[^']+)'|""(?<href>[^""]+)""|(?<href>[^>\s]+))\s*[^>]*>(?<text>.*?)</a>";
                string achorPattern = @"(?<=<a [^>]+?>)[\s\S]*?(?=</a>)";

                // Hard cap of 100 navigation pages guards against paging loops.
                int cnt = 100;
                do
                {
                    string html = Utilities.GetPageHTML(taskItem.Url);
                    MatchCollection matches = GetMatches(pattern, html);
                    foreach (Match match in matches)
                    {
                        string achor = null;
                        string href = null;
                        MatchCollection tmpMatches = GetMatches(achorHref, match.Value.Trim());
                        if (tmpMatches.Count > 0)
                        {
                            if (tmpMatches[0].Groups.Count > 0)
                            {
                                href = tmpMatches[0].Groups[1].Value;
                                achor = tmpMatches[0].Groups[2].Value;
                            }
                        }

                        if (achor == null)
                            continue;

                        if (href == null)
                            continue;

                        // Resolve relative links against the current navigation page.
                        href = Utilities.FixUpUrl(taskItem.Url, href);

                        if (GetCountByUrl(href) > 0)
                        {
                            continue;
                        }
                        //string sss = System.Web.HttpUtility.HtmlDecode(achor);

                        WebPageInfo webPage = CreateWebPage(href, achor, taskItem.CaptureRules);
                        WebPageDao dao = new WebPageDao();
                        dao.Insert(webPage, false, null);

                        pages.Add(webPage);
                    }

                    if (!String.IsNullOrEmpty(taskItem.NextPage))
                    {
                        // Find the anchor whose text is exactly TaskItem.NextPage (e.g. "Next").
                        string nextPageUrlPattern = @"<a[^>]+href=\s*(?:'(?<href>[^']+)'|""(?<href>[^""]+)""|(?<href>[^>\s]+))\s*[^>]*>" + taskItem.NextPage + @"</a>";
                        MatchCollection nextPageMatches = GetMatches(nextPageUrlPattern, html);
                        if (nextPageMatches.Count > 0)
                        {
                            if (nextPageMatches[0].Groups.Count > 0)
                            {
                                taskItem.NextPageUrl = nextPageMatches[0].Groups[1].Value;
                                taskItem.NextPageUrl = Utilities.FixUpUrl(taskItem.Url, taskItem.NextPageUrl);
                            }
                        }
                    }

                    // NOTE(review): if no next-page anchor is found, NextPageUrl keeps its
                    // previous value, so the same page may be re-fetched until cnt runs out.
                    taskItem.Url = taskItem.NextPageUrl;
                    cnt--;

                } while (!String.IsNullOrEmpty(taskItem.Url) && cnt > 0);
            }

            return pages;
        }

        /// <summary>
        /// Scrapes the navigation page at <paramref name="url"/>: each region between
        /// startString and endString is expected to contain an anchor whose text becomes
        /// the page name and whose href becomes the page URL. Each new page is
        /// downloaded (cnblogs layout via GetPageContent), compressed and inserted;
        /// already-known URLs are skipped.
        /// </summary>
        public List<WebPageInfo> GetNavigationPageHtml(string url, string startString, string endString)
        {
            List<WebPageInfo> pages = new List<WebPageInfo>();
            string html = Utilities.GetPageHTML(url);

            // Region between the configured start/end markers (lookbehind/lookahead pair).
            string pattern = @"(?<=" + startString + @")[\s\S]*?(?=" + endString + ")"; // <div class=""post_item_body"">    </h3>
            // Anchor inner text.
            string achorPattern = @"(?<=<a [^>]+?>)[\s\S]*?(?=</a>)";
            // href value. NOTE(review): requires a space after the closing quote, so
            // anchors whose href attribute is the last thing in the tag are missed.
            string achorHref = @"(?<=href="")[\s\S]*?(?="" )";

            MatchCollection matches = GetMatches(pattern, html);
            foreach (Match match in matches)
            {
                string achor = GetFirstMatch(achorPattern, match.Value.Trim());
                if (achor == null)
                    continue;

                WebPageInfo webPage = new WebPageInfo();
                webPage.Id = Guid.NewGuid();
                webPage.Name = achor;

                string href = GetFirstMatch(achorHref, match.Value.Trim());
                if (href == null)
                    continue;

                webPage.Url = href;
                webPage.CreatedDate = DateTime.Now;

                // Skip pages already captured.
                if (GetCountByUrl(webPage.Url) > 0)
                {
                    continue;
                }
                // Trailing divs re-close the containers stripped by the content pattern.
                webPage.ContentText =  GetPageContent(webPage.Url) + "</div></div>";
                
                webPage.ContentHTML = CompressHtml(webPage.ContentText, webPage.Url);

                WebPageDao dao = new WebPageDao();
                dao.Insert(webPage, false, null);

                pages.Add(webPage);
            }

            return pages;

        }

        /// <summary>
        /// Scrapes a navigation page like the two-marker overload, but also takes
        /// contentStart/contentEnd markers used by Utilities.GetPageHTML to extract each
        /// article's body. Regions between startString/endString supply an anchor
        /// (text -> Name, href -> Url); new pages are downloaded, compressed and inserted.
        /// Returns an empty list when the navigation page itself could not be fetched.
        /// </summary>
        public List<WebPageInfo> GetNavigationPageHtml(string url, string startString, string endString, string contentStart, string contentEnd)
        {
            List<WebPageInfo> pages = new List<WebPageInfo>();
            string html = Utilities.GetPageHTML(url);
            if (html.Length == 0)
                return pages;

            // Region between the configured start/end markers (lookbehind/lookahead pair).
            string pattern = @"(?<=" + startString + @")[\s\S]*?(?=" + endString + ")"; // <div class=""post_item_body"">    </h3>
            // Anchor inner text.
            string achorPattern = @"(?<=<a [^>]+?>)[\s\S]*?(?=</a>)";
            // href value with optional quoting.
            // (?<=exp) matches the position right after exp; (?=exp) the position right
            // before exp — so this captures the characters of the href value itself.
            string achorHref = @"(?<=href=[""']?)[\s\S]*?(?=[""']?[ |^>])";

            MatchCollection matches = GetMatches(pattern, html);
            
            foreach (Match match in matches)
            {
                string title = GetFirstMatch(achorPattern, match.Value.Trim());
                if (title == null)
                    continue;
                string href = GetFirstMatch(achorHref, match.Value.Trim());
                if (href == null)
                    continue;
                // Strip quoting/trailing-slash artifacts left by the loose href regex.
                href = href.Trim('"', '/', '\'');
                if (GetCountByUrl(href) > 0)
                    continue;

                WebPageInfo webPage = new WebPageInfo();
                webPage.Id = Guid.NewGuid();
                webPage.Name = title;
                webPage.Url = href;
                webPage.CreatedDate = DateTime.Now;
                
                try
                {
                    // Trailing divs re-close containers stripped by the content markers.
                    webPage.ContentText = Utilities.GetPageHTML(webPage.Url, contentStart, contentEnd, true) + "</div></div>";
                }
                catch
                { } // NOTE(review): download failures are silently ignored; ContentText stays null.

                webPage.ContentHTML = Utilities.CompressHtml(webPage.ContentText, webPage.Url);

                WebPageDao dao = new WebPageDao();
                dao.Insert(webPage, false, null);

                pages.Add(webPage);
            }

            return pages;
        }

        /// <summary>
        /// Captures the page at <paramref name="url"/> (content delimited by
        /// startString/endString) and inserts it; returns the DAO insert result.
        /// </summary>
        public static bool InsertSinglePage(string url,  string startString, string endString)
        {
            WebPageInfo page = CreateWebPage(url, null, startString, endString);
            return new WebPageDao().Insert(page, false, null);
        }

        /// <summary>
        /// Captures and stores a single page, taking the whole page as content
        /// (no start/end delimiters).
        /// </summary>
        public static bool InsertSinglePage(string url)
        {
            return InsertSinglePage(url, null, null);
        }

        /// <summary>
        /// Captures a single page using the supplied capture rules, stores it,
        /// and returns the created WebPageInfo.
        /// </summary>
        public static WebPageInfo InsertSinglePage(string url, IEnumerable<CaptureRuleInfo> rules)
        {
            WebPageInfo webPage = CreateWebPage(url, null, rules);
            new WebPageDao().Insert(webPage, false, null);
            return webPage;
        }

        /// <summary>
        /// Builds a WebPageInfo for <paramref name="url"/>: downloads the content
        /// between startString and endString, extracting the page title when
        /// <paramref name="title"/> is not supplied, then compresses the content into
        /// ContentHTML. Download failures are swallowed, leaving ContentText null.
        /// </summary>
        private static WebPageInfo CreateWebPage(string url, string title, string startString, string endString)
        {
            WebPageInfo webPage = new WebPageInfo();
            webPage.Id = Guid.NewGuid();
            webPage.Name = title;
            webPage.Url = url;
            webPage.CreatedDate = DateTime.Now;
            try
            {
                if (String.IsNullOrEmpty(title))
                {
                    // No title given: let GetPageHTML extract it from the page.
                    // Trailing divs re-close containers stripped by the markers.
                    webPage.ContentText = Utilities.GetPageHTML(webPage.Url, startString, endString, true, out title) + "</div></div>";
                    webPage.Name = title;
                }
                else
                {
                    webPage.ContentText = Utilities.GetPageHTML(webPage.Url, startString, endString, true) + "</div></div>";
                }
            }
            catch
            { } // NOTE(review): best-effort by design, but consider logging the failure.
            webPage.ContentHTML = Utilities.CompressHtml(webPage.ContentText, webPage.Url);
            return webPage;
        }

        /// <summary>
        /// Builds a WebPageInfo for <paramref name="url"/>, downloading its content
        /// with the Category == 1 (content) capture rule when one is supplied, or the
        /// whole page otherwise. When <paramref name="title"/> is empty the page title
        /// is extracted from the downloaded HTML. Download failures are swallowed,
        /// leaving ContentText null.
        /// </summary>
        private static WebPageInfo CreateWebPage(string url, string title, IEnumerable<CaptureRuleInfo> rules)
        {
            WebPageInfo webPage = new WebPageInfo();
            webPage.Id = Guid.NewGuid();
            webPage.Name = title;
            webPage.Url = url;
            webPage.CreatedDate = DateTime.Now;
            try
            {
                // BUGFIX: the original dereferenced 'rules' BEFORE its null check
                // ("if (rules == null || ruleContent == null)"), so a null rules
                // collection always threw (silently swallowed by the catch) instead of
                // falling back to a whole-page capture as that condition intends.
                CaptureRuleInfo ruleContent = (rules == null)
                    ? null
                    : rules.FirstOrDefault(r => r.Category == 1);

                if (ruleContent == null)
                {
                    // No content rule: capture the whole page.
                    if (String.IsNullOrEmpty(title))
                    {
                        webPage.ContentText = Utilities.GetPageHTML(webPage.Url, null, null, true, out title);
                        webPage.Name = title;
                    }
                    else
                    {
                        webPage.ContentText = Utilities.GetPageHTML(webPage.Url, null, null, true);
                    }
                }
                else
                {
                    // Content rule present: capture between StartString/EndString and
                    // wrap with the rule's prefix/suffix.
                    if (String.IsNullOrEmpty(title))
                    {
                        webPage.ContentText = ruleContent.Prefix + Utilities.GetPageHTML(webPage.Url, ruleContent.StartString, ruleContent.EndString, true, out title) + ruleContent.Suffix;
                        webPage.Name = title;
                    }
                    else
                    {
                        webPage.ContentText = ruleContent.Prefix + Utilities.GetPageHTML(webPage.Url, ruleContent.StartString, ruleContent.EndString, true) + ruleContent.Suffix;
                    }
                }
            }
            catch
            {
                // Best-effort: download/parse failures leave ContentText null.
                // NOTE(review): consider logging here.
            }
            webPage.ContentHTML = Utilities.CompressHtml(webPage.ContentText, webPage.Url);
            return webPage;
        }

        /// <summary>
        /// Downloads a cnblogs-style listing page and captures each post it links to:
        /// the anchor inside every &lt;div class="post_item_body"&gt; block supplies the
        /// post name and URL; new posts are downloaded, compressed and inserted.
        /// Web failures are swallowed and a possibly partial list is returned.
        /// </summary>
        public List<WebPageInfo> GetNavigationPageContent(string url)
        {
            List<WebPageInfo> pages = new List<WebPageInfo>();

            HttpWebRequest req = WebRequest.Create(url) as HttpWebRequest;
            string responseXml = string.Empty;
            string httpStatus = string.Empty;
            WebResponse resp = null;

            try
            {
                resp = req.GetResponse();
                responseXml = GetResponse(resp);
                httpStatus = resp.Headers["Status"];

                // One match per post-summary block on the listing page.
                string pattern = @"(?<=<div class=""post_item_body"">)[\s\S]*?(?=</h3>)";
                Regex regex = new Regex(pattern, RegexOptions.IgnoreCase);
                StringBuilder sb = new StringBuilder();
                // Anchor inner text.
                string p1 = @"(?<=<a [^>]+?>)[\s\S]*?(?=</a>)";
                // href value; requires a space after the closing quote.
                string achorHref = @"(?<=href="")[\s\S]*?(?="" )";
                foreach (Match match in regex.Matches(responseXml))
                {
                    Regex tmpRegex = new Regex(p1, RegexOptions.IgnoreCase);
                    MatchCollection matches = tmpRegex.Matches(match.Value.Trim());

                    if (matches.Count > 0)
                    {
                        WebPageInfo webPage = new WebPageInfo();
                        webPage.Id = Guid.NewGuid();
                        webPage.Name = matches[0].Value;

                        tmpRegex = new Regex(achorHref, RegexOptions.IgnoreCase);
                        matches = tmpRegex.Matches(match.Value.Trim());
                        if (matches.Count > 0)
                        {
                            webPage.Url = matches[0].Value;
                            webPage.CreatedDate = DateTime.Now;

                            // Already captured: skip (also skips the pages.Add below).
                            if (GetCountByUrl(webPage.Url) > 0)
                            {
                                continue;
                            }

                            // Trailing divs re-close containers stripped by the content pattern.
                            webPage.ContentText = GetPageContent(webPage.Url) + "</div></div>";

                            webPage.ContentHTML = CompressHtml(webPage.ContentText, webPage.Url);

                            WebPageDao dao = new WebPageDao();
                            dao.Insert(webPage, false, null);

                        }

                        // NOTE(review): posts whose anchor has no parsable href are still
                        // added here with a null Url and no content — confirm intended.
                        pages.Add(webPage);
                    }
                }

            }
            catch (WebException wex)
            {
                // Best-effort: network errors leave the partial result.
            }
            finally
            {
                if (resp != null)
                {
                    resp.Close();
                }
            }
            return pages;
        }

        /// <summary>
        /// Returns the text of the first case-insensitive match of
        /// <paramref name="pattern"/> in <paramref name="originalText"/>, or null when
        /// there is no match.
        /// </summary>
        private static string GetFirstMatch(string pattern, string originalText)
        {
            MatchCollection matches = GetMatches(pattern, originalText);
            return matches.Count > 0 ? matches[0].Value : null;
        }


        /// <summary>
        /// Runs a case-insensitive regex over <paramref name="originalText"/> and
        /// returns all matches.
        /// </summary>
        private static MatchCollection GetMatches(string pattern, string originalText)
        {
            var regex = new Regex(pattern, RegexOptions.IgnoreCase);
            return regex.Matches(originalText);
        }

        /// <summary>
        /// Downloads the bytes at <paramref name="imgSrc"/> and returns them,
        /// or null if the download fails mid-stream.
        /// </summary>
        private static byte[] GetImageStream(string imgSrc)
        {
            byte[] buf = null;
            WebRequest request = WebRequest.Create(imgSrc);
            WebResponse response = request.GetResponse();
            try
            {
                using (Stream from = response.GetResponseStream())
                using (MemoryStream to = new MemoryStream())
                {
                    int readCount;
                    byte[] buffer = new byte[1024];

                    while ((readCount = from.Read(buffer, 0, buffer.Length)) != 0)
                    {
                        to.Write(buffer, 0, readCount);
                    }

                    // BUGFIX: the original called to.Read(...) from the stream's current
                    // position (the end), so the returned array was always zero-filled.
                    // ToArray() copies the entire buffered content regardless of Position.
                    buf = to.ToArray();
                }
            }
            catch
            {
                // Best-effort: a failed download yields null.
            }
            finally
            {
                response.Close();
            }
            return buf;
        }

        /// <summary>
        /// Streams the bytes at <paramref name="imgSrc"/> straight into the currently
        /// open SharpZipLib zip entry. Download failures are silently ignored,
        /// leaving the entry with whatever was written before the failure.
        /// </summary>
        private static void GetImageStream(ZipOutputStream to,string imgSrc)
        {
            WebRequest request = WebRequest.Create(imgSrc);
            WebResponse response = request.GetResponse();
            try
            {
                using (Stream source = response.GetResponseStream())
                {
                    byte[] chunk = new byte[1024];
                    for (int n = source.Read(chunk, 0, chunk.Length); n != 0; n = source.Read(chunk, 0, chunk.Length))
                    {
                        to.Write(chunk, 0, n);
                    }
                }
            }
            catch
            {
                // Best-effort: swallow download errors.
            }
            finally
            {
                response.Close();
            }
        }

        /// <summary>
        /// DotNetZip (Ionic) counterpart of GetImageStream: streams the bytes at
        /// <paramref name="imgSrc"/> into the currently open Ionic zip entry.
        /// Download failures are silently ignored.
        /// </summary>
        private static void GetImageStream1(Ionic.Zip.ZipOutputStream to, string imgSrc)
        {
            WebRequest request = WebRequest.Create(imgSrc);
            WebResponse response = request.GetResponse();
            try
            {
                using (Stream source = response.GetResponseStream())
                {
                    byte[] chunk = new byte[1024];
                    for (int n = source.Read(chunk, 0, chunk.Length); n != 0; n = source.Read(chunk, 0, chunk.Length))
                    {
                        to.Write(chunk, 0, n);
                    }
                }
            }
            catch
            {
                // Best-effort: swallow download errors.
            }
            finally
            {
                response.Close();
            }
        }


        #region Compression

        /// <summary>
        /// Zips the given HTML (as "ArticleContent.htm") together with every image it
        /// references, using DotNetZip (Ionic). Returns the archive bytes.
        /// </summary>
        public static byte[] CompressHtml1(string html)
        {
            using (MemoryStream ms = new MemoryStream())
            {
                using (Ionic.Zip.ZipOutputStream zip = new Ionic.Zip.ZipOutputStream(ms))
                {
                    zip.CompressionLevel = CompressionLevel.BestCompression;

                    // Write the article HTML itself.
                    zip.PutNextEntry("ArticleContent.htm");
                    using (MemoryStream memoryStream = StringToMemoryStream(html))
                    {
                        int readCount;
                        byte[] buffer = new byte[4096];
                        while ((readCount = memoryStream.Read(buffer, 0, buffer.Length)) != 0)
                        {
                            zip.Write(buffer, 0, readCount);
                        }
                    }

                    // Add each distinct referenced image once, keyed by file name.
                    string imgPattern = @"(?<=<img )[\s\S]*?(?=>)";
                    MatchCollection imgTags = GetMatches(imgPattern, html);
                    StringCollection added = new StringCollection();
                    foreach (Match m in imgTags)
                    {
                        // NOTE(review): this lookahead requires a space after the closing
                        // quote of src="..."; an src attribute that ends the tag is missed.
                        string imgPattern2 = @"(?<=src[\s]*=[\s]*""?)[\s\S]*?(?=""? )";
                        MatchCollection srcMatches = GetMatches(imgPattern2, m.Value);
                        if (srcMatches.Count > 0)
                        {
                            string imgSrc = srcMatches[0].Value.Trim().Trim('\"');
                            string imgName = imgSrc.Substring(imgSrc.LastIndexOf('/') + 1);

                            if (!added.Contains(imgName))
                            {
                                zip.PutNextEntry(imgName);
                                GetImageStream1(zip, imgSrc);

                                added.Add(imgName);
                            }
                        }
                    }
                }

                // BUGFIX: the archive bytes must be taken AFTER the zip stream is
                // disposed — DotNetZip writes the zip central directory on close, so the
                // original (which read ms while the zip was still open) returned a
                // truncated, invalid archive. ToArray() still works after disposing the
                // zip stream has closed 'ms'.
                return ms.ToArray();
            }
        }


        /// <summary>
        /// Packages the article HTML and its referenced images into a single ZIP archive.
        /// Image src attributes found in the HTML are rewritten to local entry names,
        /// and the HTML itself is stored as "ArticleContent.htm".
        /// </summary>
        /// <param name="html">The article HTML fragment to package.</param>
        /// <param name="url">The page URL, used to resolve root-relative image paths.</param>
        /// <returns>The raw bytes of the generated ZIP archive.</returns>
        public static byte[] CompressHtml(string html, string url)
        {
            byte[] byData = null;
            using (MemoryStream ms = new MemoryStream())
            using (ZipOutputStream zip = new ZipOutputStream(ms))
            {
                zip.SetLevel(9); // 0 - store only to 9 - means best compression
                byte[] buffer = new byte[4096];

                #region images
                string imgPattern = @"(?<=<img )[\s\S]*?(?=>)";
                MatchCollection matches1 = GetMatches(imgPattern, html);
                // Entry names already added to the archive (avoid duplicate entries).
                StringCollection sc = new StringCollection();
                foreach (Match m in matches1)
                {
                    string imgPattern2 = @"(?<=src[\s]*=[\s]*""?)[\s\S]*?(?=""? )";
                    MatchCollection matches2 = GetMatches(imgPattern2, m.Value);
                    if (matches2.Count == 0)
                    {
                        continue;
                    }

                    string imgSrc = matches2[0].Value.Trim().Trim('\"');

                    string imgName = imgSrc.Substring(imgSrc.LastIndexOf('/') + 1);

                    // A query string or invalid file-name characters make the name
                    // unusable as a ZIP entry; substitute a random .png name.
                    // (Fix: the original computed an unused `index` local and called
                    // IndexOf('?') twice in two separate if-blocks with identical bodies.)
                    if (imgName.IndexOf('?') != -1 ||
                        imgName.IndexOfAny(Path.GetInvalidFileNameChars()) != -1)
                    {
                        imgName = Guid.NewGuid().ToString("n") + ".png";
                    }

                    html = html.Replace(imgSrc, imgName);

                    if (!sc.Contains(imgName))
                    {
                        ZipEntry entry = new ZipEntry(imgName);
                        entry.DateTime = DateTime.Now;

                        zip.PutNextEntry(entry);

                        // Resolve root-relative image paths against the page's host.
                        if (imgSrc.StartsWith("/"))
                        {
                            Uri uri = new Uri(url);
                            imgSrc = uri.Scheme + "://" + uri.Host + imgSrc;
                        }

                        GetImageStream(zip, imgSrc);

                        sc.Add(imgName);
                    }
                }
                #endregion

                #region html entry
                ZipEntry zipEntry = new ZipEntry("ArticleContent.htm");
                zipEntry.DateTime = DateTime.Now;
                zip.PutNextEntry(zipEntry);
                using (MemoryStream memoryStream = StringToMemoryStream(html))
                {
                    int readCount;
                    while ((readCount = memoryStream.Read(buffer, 0, buffer.Length)) != 0)
                    {
                        zip.Write(buffer, 0, readCount);
                    }
                }
                #endregion

                zip.Finish();
                zip.Flush();

                // Copy the archive bytes out before the using-blocks dispose the streams.
                // (The explicit zip.Close() was redundant inside the using and removed.)
                byData = new Byte[ms.Length];
                ms.Position = 0;
                ms.Read(byData, 0, byData.Length);
            }

            return byData;
        }


        /// <summary>
        /// Converts a string to a <see cref="MemoryStream"/> over its UTF-8 bytes.
        /// </summary>
        /// <param name="s">The text to encode.</param>
        /// <returns>A stream positioned at 0 containing the UTF-8 encoding of <paramref name="s"/>.</returns>
        static MemoryStream StringToMemoryStream(string s)
        {
            return new MemoryStream(Encoding.UTF8.GetBytes(s));
        }

        /// <summary>
        /// Decodes the entire contents of a <see cref="MemoryStream"/> as a UTF-8 string.
        /// Uses <see cref="MemoryStream.ToArray"/>, so the stream's current position is ignored.
        /// </summary>
        /// <param name="ms">The stream whose full buffer is decoded.</param>
        /// <returns>The UTF-8 decoded text.</returns>
        static String MemoryStreamToString(MemoryStream ms)
        {
            return Encoding.UTF8.GetString(ms.ToArray());
        }

        /// <summary>
        /// Copies all remaining bytes from <paramref name="src"/> to
        /// <paramref name="dest"/> in 1 KB chunks, then flushes the destination.
        /// </summary>
        /// <param name="src">Stream read from its current position to the end.</param>
        /// <param name="dest">Stream the bytes are written to.</param>
        static void CopyStream(System.IO.Stream src, System.IO.Stream dest)
        {
            byte[] chunk = new byte[1024];
            int read;
            while ((read = src.Read(chunk, 0, chunk.Length)) > 0)
            {
                dest.Write(chunk, 0, read);
            }
            dest.Flush();
        }

        /// <summary>
        /// Compresses a string with zlib (DEFLATE with zlib header) at best compression.
        /// The text is UTF-8 encoded before compression; pair with <see cref="Decompress"/>.
        /// </summary>
        /// <param name="originalText">The text to compress.</param>
        /// <returns>The zlib-compressed bytes.</returns>
        static byte[] Compress(string originalText)
        {
            byte[] byData = null;

            using (MemoryStream ms = new MemoryStream())
            using (ZlibStream zlibStreamOut = new ZlibStream(ms, CompressionMode.Compress, CompressionLevel.BestCompression, true))
            {
                // Fix: dispose the intermediate source stream instead of leaking it.
                using (MemoryStream source = StringToMemoryStream(originalText))
                {
                    CopyStream(source, zlibStreamOut);
                }

                // Close flushes the trailing compressed data; leaveOpen=true keeps ms readable.
                zlibStreamOut.Close();

                ms.Position = 0;
                byData = new Byte[ms.Length];
                ms.Read(byData, 0, byData.Length);
            }

            return byData;
        }
        /// <summary>
        /// Decompresses zlib-compressed bytes (as produced by <see cref="Compress"/>)
        /// back into a UTF-8 string.
        /// </summary>
        /// <param name="buffer">The zlib-compressed data.</param>
        /// <returns>The decompressed text.</returns>
        static string Decompress(byte[] buffer)
        {
            // Fix: the original wrapped the SAME MemoryStream as both the compressed
            // source and the ZlibStream's inner stream, then decoded that stream's
            // buffer — returning compressed/clobbered bytes instead of the text.
            // Read through the decompressing stream into a separate output stream.
            string text = "";
            using (MemoryStream input = new MemoryStream(buffer))
            using (ZlibStream zlibStreamIn = new ZlibStream(input, CompressionMode.Decompress))
            using (MemoryStream output = new MemoryStream())
            {
                CopyStream(zlibStreamIn, output);
                text = MemoryStreamToString(output);
            }
            return text;
        }

        #endregion

    }
}
