import org.cyberneko.html.parsers.SAXParser

/**
 * Scrapes baby-care articles from baby.sina.com.cn and stores them as Entry
 * domain objects.
 *
 * Crawl pipeline:
 *   itemhref()  -> collects category menu links from two section pages
 *   entryhref   -> parses each category page's embedded "data_p" script into
 *                  per-article maps (title / url / source / content)
 *   dosave      -> persists each map as an Entry, skipping known URLs
 *
 * NOTE(review): requires the NekoHTML SAXParser and the Grails Entry domain
 * class; live network access is needed at runtime.
 */
class SinababyController {

  def index = { }

  /**
   * Persists every scraped entry, skipping source URLs already stored.
   * Renders a short status string per entry.
   */
  def dosave = {
    def entryList = entryhref()
    entryList.each {
      println it
      def entryInstance = Entry.findBySourceUrl(it.url)
      if (entryInstance) {
        render ' already exists '
      } else {
        def entry = new Entry(it)
        if (entry.catalog) {
          entry.depth = entry.catalog.depth
        }
        // BUG FIX: the original saved twice (flush save, then a second plain
        // save inside the condition), issuing a redundant update.  Save once
        // and report validation errors from that single attempt.
        if (entry.save(flush: true) && !entry.hasErrors()) {
          render " save Ok "
        } else {
          render entry.errors
        }
      }
    }
  }

  /**
   * Collects the category menu links (class "jk_menu_2") from the /health and
   * /edu section pages.
   *
   * @return list of maps with keys 'url' (relative href) and 'title'
   */
  def itemhref() {
    println " in method1 "
    def sections = ["/health", "/edu"]
    def urlPrefix = "http://baby.sina.com.cn"
    def parser = new SAXParser()
    // Disable namespaces so GPath can address nodes by their plain HTML names.
    parser.setFeature('http://xml.org/sax/features/namespaces', false)
    def itemUrlList = new ArrayList()
    sections.each {
      def sinaHost = urlPrefix + it
      def itemPage = new XmlParser(parser).parse(sinaHost)
      def data = itemPage.depthFirst().UL.grep { it.'@class' == "jk_menu_2" }
      data.LI.A.each { link ->
        link.each {
          def urlMap = [:]
          urlMap.url = it.'@href'
          urlMap.title = it.text()
          itemUrlList.add(urlMap)
        }
      }
    }
    return itemUrlList
  }

  /**
   * Builds the full article list.  For each category page it locates the
   * inline script that declares "data_p=new Array()", extracts the
   * title/href assignment lines (which come in pairs), and assembles one
   * entry map per article.
   *
   * @return list of maps with keys 'title', 'url', 'source', 'catalog.id',
   *         'content'
   */
  def entryhref = {
    println "in method2"
    def itemUrlList = itemhref()
    def parser = new SAXParser()
    parser.setFeature('http://xml.org/sax/features/namespaces', false)
    def urlPrefix = "http://baby.sina.com.cn"
    def entryList = new ArrayList()
    itemUrlList.each {
      def entryHost = urlPrefix + it.url
      //println entryHost
      def page = new XmlParser(parser).parse(entryHost)
      def data = page.depthFirst().grep { it.name() == 'SCRIPT' }
      def urlBody = null
      data.each {
        if (it.text().indexOf("data_p=new Array()") != -1)
          urlBody = it.toString()
      }
      // ROBUSTNESS: pages without the data_p script used to NPE on split().
      if (urlBody == null) {
        return
      }
      // BUG FIX: swapUrlList was declared once outside this loop and never
      // cleared, so every page re-processed all previously collected lines
      // and produced duplicate entries.  Keep it local to the page.
      def swapUrlList = new ArrayList()
      String[] urlArray = urlBody.split("\\n")
      urlArray.each {
        if (it.indexOf("data") != -1 && it.indexOf("Array") == -1) {
          swapUrlList.add(it)
        }
      }
      // Lines come in (title, href) pairs; the i+1 bound guards an odd-length
      // list (the original indexed past the end and NPE'd on split()).
      for (int i = 0; i + 1 < swapUrlList.size(); i += 2) {
        def entryMap = [:]
        def title = swapUrlList[i].split("=")[1]
        def href = urlPrefix + swapUrlList[i + 1].split("=")[1]
        // Strip the surrounding quote characters from the JS string literal.
        entryMap.title = title.substring(1, title.length() - 2)
        entryMap.url = href
        entryMap.source = "sina"
        entryMap['catalog.id'] = '2'
        entryMap.content = getcontent(href)
        // BUG FIX: original added the undefined 'urlMap' here
        // (MissingPropertyException); the map built above is 'entryMap'.
        entryList.add(entryMap)
      }
    }
    return entryList
  }

  /**
   * Fetches an article page and returns the concatenated text of its main
   * content DIVs.
   *
   * BUG FIX: the closure referenced an undefined 'url' variable (and an
   * unused 'href' local); callers already pass the URL as a single argument,
   * so it is now the explicit closure parameter.  The buffer was also built
   * but never filled, so the method always returned "" — the matched DIV
   * text is now appended.
   *
   * @param url absolute article URL
   * @return article body text, or "" when no content DIV matches
   */
  def getcontent = { url ->
    def buffer = new StringBuffer()
    def parser = new SAXParser()
    // Consistency: the other parsers in this class disable namespaces too,
    // which the uppercase GPath names (.DIV) rely on.
    parser.setFeature('http://xml.org/sax/features/namespaces', false)
    def chapter = new XmlParser(parser).parse(url)
    def chapterDate = chapter.depthFirst().DIV.grep {
      it.'@class' == 'mainContentPad artExpContent' || it.'@class' == 'mainContentPad'
    }
    println "chapterDate.size() : " + chapterDate.size()
    chapterDate.each { buffer.append(it.text()) }
    return buffer.toString()
  }
}
