class Scan < ActiveRecord::Base
  has_many :results, :dependent => :destroy
  
  # Crawls every listing page for each AcType in +ac_types+ and returns an
  # array of [ac_type_id, Nokogiri document] pairs, one entry per page.
  #
  # The first page of each type is fetched synchronously (it carries the
  # total page count); the remaining pages are fetched concurrently by a
  # bounded pool of worker threads.  The original implementation throttled
  # the pool with a busy-wait `while` loop around an unsynchronised
  # `th_count` counter, which burned a full CPU core while waiting and
  # raced on the counter from multiple threads; a Queue drained by a fixed
  # set of workers gives the same 8-way concurrency without either problem.
  def self.search ac_types
    require 'net/http/persistent' # deliberate lazy load: only the crawler needs it
    pages = []
    pages_mutex = Mutex.new # pages is appended to from worker threads
    http = Net::HTTP::Persistent.new(nil, (PROXY ? URI.parse(PROXY) : nil))
    http.connection_for(URI.parse('http://bilibili.us/'))
    ac_types.each do |ac_type|
      date_now = Time.now
      head_url = "http://#{BILI_PATH}/video/#{ac_type.page_name}"
      first_page = Nokogiri::HTML(open("#{head_url}-1.html", :proxy => PROXY), nil, 'gb18030')
      # Total page count lives in <span class="pageinfo"><strong>…</strong>.
      page_count = first_page.xpath(%<//span[@class='pageinfo']/strong[1]>).text.to_i
      pages << [ac_type.id, first_page]
      # Console progress ruler: a digit mark every 50 pages, then '|' per
      # 50 pages fetched and '.' per 2 pages fetched.
      puts "read #{ac_type.page_name} pages(#{page_count})."
      puts (1..9).inject('0'){|r, i| r + " " * (25 - (i * 50).to_s.size*2) + (i * 50).to_s}
      print "|"
      # Feed the remaining page numbers into a queue and drain it with at
      # most 8 workers (fewer when there are fewer pages than workers).
      queue = Queue.new
      (2..page_count).each { |i| queue << i }
      threads = Array.new([8, queue.size].min) do
        Thread.new do
          loop do
            begin
              i = queue.pop(true) # non-blocking pop: ThreadError == drained
            rescue ThreadError
              break
            end
            tmp_page = search_again_and_again("#{head_url}-#{i}.html", http)
            pages_mutex.synchronize { pages << [ac_type.id, tmp_page] }
            print((i % 50).zero? ? "|" : ((i % 2).zero? ? "." : ""))
          end
        end
      end
      threads.each { |t| t.join }
      print "#{Time.now - date_now} second complete!\n"
    end
    http.shutdown
    pages
  end
  
  # Parses every work entry (div.listpg) on +page+ and upserts it into the
  # Work table.  Existing rows are refreshed only when +work_update+ is
  # true; creation failures (duplicate wid, validation errors) are logged
  # and skipped so one bad entry cannot abort the whole scan.
  #
  # NOTE(review): the return value is page.css(...)'s NodeSet (#each's
  # receiver), not the created works — callers appear to rely on side
  # effects only.
  def self.work_info_fixed page, ac_type_id, work_update=false
    page.css("div.listpg").each do |listpg|
      title_a = listpg.css("a.title[1]").first
      next unless title_a # skip malformed entries instead of crashing
      wid = Scan.get_wid title_a
      name = title_a.text
      cdate = listpg.css("div.date").text
      author_name = listpg.search("div[@class='upzhu']/b").text
      # Picture path is stored relative to /uploads/userup/.
      pic_path = listpg.search("img").first.get_attribute("src").sub(/\/uploads\/userup\//, '')
      work = Work.find_by_wid wid
      if work
        if work_update
          work.update_attributes :cdate => cdate,
                                 :author_name => author_name,
                                 :ac_type_id => ac_type_id,
                                 :pic_path => pic_path
        end
      else
        begin
          work = Work.create! :name => name,
                              :wid => wid,
                              :cdate => cdate,
                              :ac_type_id => ac_type_id,
                              :author_name => author_name,
                              :pic_path => pic_path
        rescue StandardError
          # Best-effort insert: log and carry on with the next entry
          # (the original used a silent inline rescue modifier).
          p "#{wid} can't create"
        end
      end
      work
    end
  end
  
  # Records one Result row for this scan per known work listed on +page+.
  # The four <b> cells inside div.dinfo are, in document order:
  # clicks, danmu, comments, stows.
  def add_results page
    page.css("div.listpg").each do |entry|
      link = entry.css("a.title[1]").first
      work = Work.find_by_wid(Scan.get_wid(link))
      next unless work # entries for unknown works are ignored

      stats = entry.search("div[@class='dinfo']/b")
      Result.create! :work_id => work.id,
                     :scan_id => self.id,
                     :clicks => stats[0].text,
                     :comments => stats[2].text,
                     :danmu => stats[1].text,
                     :stows => stats[3].text
    end
  end
  
  # GETs +url+ over the persistent +http+ connection and parses the body
  # as GB18030 HTML.  Each attempt is capped at 30 seconds; on timeout it
  # logs "again~N" and retries indefinitely (same semantics as before).
  # Rewritten with `retry` instead of the original self-recursion, so a
  # long network outage can no longer grow the call stack without bound.
  def self.search_again_and_again url, http, count = 0
    begin
      puts "again~#{count}" unless count == 0
      re = Timeout.timeout(30){ http.request(URI.parse(url)) }
      Nokogiri::HTML(re.body, nil, 'gb18030')
    rescue Timeout::Error #, OpenURI::HTTPError
      count += 1
      retry
    end
  end

  # Extracts the numeric work id from a title link's href.  Handles both
  # URL styles seen on the site: ".../av12345/..." and "...?id=12345".
  # Always returns an Integer — the original leaked a String from the
  # `id=` fallback branch, so the same wid could be stored under two
  # different types.
  def self.get_wid title_a
    href = title_a.get_attribute("href")
    wid = href.split('/av')[1].to_i
    wid = href.split('id=').last.to_i if wid.zero?
    wid
  end
end
