require 'optparse'
require 'sqlite3'
require 'rubygems'
require 'active_record'
require 'mechanize'

$options = {}

# Command-line interface: collects -o/--output (required downstream)
# and -n/--new into the global $options hash; -h prints usage and exits.
parser = OptionParser.new("", 24) do |opts|
  opts.banner = "\nScraper 1.0\nAuthor: Louis (Skype: louisprm)\n\n"

  opts.on("-o", "--output SQLITE3DB", "Output SQLite3 database file") do |path|
    $options[:output] = path
  end

  opts.on("-n", "--new", "Start new, no resume") do |flag|
    $options[:new] = flag
  end

  opts.on_tail("-h", "--help", "Displays this help") do
    puts opts, "", help
    exit
  end
end

# Returns the multi-line usage guide printed by the -h/--help option.
# (Fixes typos in the original text: "instance)to" and "ony-by-one".)
def help
  return <<-eos

GUIDELINE
-------------------------------------------------------
The scraper package includes two scripts

  1. scrape.rb: scrape data from the internet and store to a local database file
  2. export.rb: read the local database and generate the Excel/CSV output

Procedures:

  1. Run the scrape script and store scraped data to local database file main.db

        ruby scrape.rb --output=main.db

  2. After the scraper script is done, run the export.rb script to read the main.db
     database and generate the Excel file data.xls

        ruby export.rb --input=main.db --output=/tmp/data.xls

Notes:

- The scrape.rb script supports resuming. Just run the script over and over again
  in case of any failure (due to internet connection problem for instance) to have
  it start from where it left off. Be sure to specify the same output database file
- As the scrape script stores items one-by-one, you can run the export script
  even when the scraping process is not complete yet. Then it will export available
  items in the local database

eos
end

# Parse command-line arguments; OptionParser raises on unknown options.
begin
  parser.parse!
rescue SystemExit => ex
  # -h handler calls exit; let the process terminate normally.
  exit
rescue StandardError => ex
  # Rescue StandardError, NOT Exception: rescuing Exception would swallow
  # signals (SignalException) and NoMemoryError. Also, the script is
  # scrape.rb per its own help text, not crawler.rb.
  puts "\nERROR: #{ex.message}\n\nRun ruby scrape.rb -h for help\n\n"
  exit
end

# The output database path is mandatory for everything that follows.
if $options[:output].nil?
  puts "\nPlease specify output file: -o\n\n"
  exit
end

# Point ActiveRecord at the SQLite3 database file given via --output.
# Timeout is in milliseconds (how long to wait on a locked database).
db_config = {
  adapter: 'sqlite3',
  database: $options[:output],
  timeout: 15000
}
ActiveRecord::Base.establish_connection(db_config)

require 'zlib'

# Convenience extensions on String used when storing scraped pages:
# raw HTML is zlib-compressed before being written to the database.
class String
  # Returns this string compressed with zlib.
  def deflate
    Zlib::Deflate.deflate(self)
  end

  # Returns the decompressed form of a zlib-compressed string.
  def inflate
    Zlib::Inflate.inflate(self)
  end

  # Strips bytes that are invalid or unencodable in UTF-8, in place.
  def fix
    self.encode!('UTF-8', undef: :replace, invalid: :replace, replace: "")
  end
end

# Schema for the scraped items: URL/metadata columns as text, the
# (zlib-compressed) page bodies as binary blobs.
class MySchema < ActiveRecord::Migration
  def change
    create_table :items do |t|
      %i[url meta suburl caturl].each { |column| t.text column }
      %i[desc html].each { |column| t.binary column }
    end
  end
end

# Initiate the database schema on first run only.
# NOTE: File.exists? was deprecated for years and removed in Ruby 3.2;
# File.exist? is the correct, supported predicate.
MySchema.new.migrate(:change) unless File.exist?($options[:output])

# ActiveRecord model over the `items` table created by MySchema.
# The `meta` text column holds a JSON-serialized hash of scrape context
# (category/subcategory names and URLs) — see Scrape#pager / Scrape#get.
class Item < ActiveRecord::Base
  serialize :meta, JSON
end

# Crawls abtec.co.nz: walks the category menu, paginates each listing,
# and stores every product page as an Item row (resumable — already-seen
# URLs are skipped).
class Scrape
  SITE = 'http://abtec.co.nz/'

  def initialize
    @a = Mechanize.new
    # Skip TLS verification — presumably the site's certificate does not
    # validate; NOTE(review): confirm this is still required.
    @a.agent.http.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end

  # Walk top-level categories and their sub-categories, handing each
  # listing URL to #pager along with its naming metadata.
  def run
    categories = @a.get(SITE).parser.css('ul.dropdown-vertical').first.css('> li > a:nth-child(1)')
    categories.each do |category|
      catname = category.xpath('text()').text.strip
      caturl = category.attributes['href'].value
      p catname, caturl

      subcategories = category.css('+ ul > li > a')
      subcategories.each do |subcategory|
        subname = subcategory.xpath('text()').text.strip
        suburl = subcategory.attributes['href'].value
        p "  #{subname}"

        pager(suburl, {catname: catname, caturl: caturl, subname: subname, suburl: suburl})
      end

      # Category with no sub-links: paginate the category page itself.
      if subcategories.empty?
        pager(caturl, {catname: catname, caturl: caturl, subname: nil, suburl: nil})
      end
    end
  end

  # Iterate listing pages (URL scheme: <url>/<page-number>) until a page
  # yields no item links; 1000 is a hard safety cap against infinite paging.
  def pager(url, meta)
    1.upto(1000) do |page|
      p "    Page #{page}"
      page_url = File.join(url, page.to_s)
      # BUG FIX: map to the href *string* via a['href'] — the original
      # collected Nokogiri attribute nodes (missing .value, unlike the
      # extraction in #run), which File.join in #get cannot coerce.
      item_urls = @a.get(page_url).parser.css('#columns > div.pin > form > a').map { |a| a['href'] }

      # Empty page means we ran past the last page — stop before
      # attempting to process anything (equivalent, but clearer, than
      # breaking after the loop body).
      break if item_urls.empty?

      item_urls.each do |item_url|
        get(item_url, meta.merge(page_url: page_url))
      end
    end
  end

  # Fetch one product page and persist it. Skipping rows that already
  # exist is what makes the scrape resumable.
  def get(url, meta)
    url = File.join(SITE, url)
    puts url

    if Item.exists?(url: url, suburl: meta[:suburl] || '', caturl: meta[:caturl] || '')
      puts "Already scraped"
      puts "--------------------------------------"
      return
    end

    resp = @a.get(url)

    item = Item.new

    # Key attributes (url + suburl + caturl form the resume identity).
    item.url = url
    item.meta = meta
    item.suburl = meta[:suburl] || ''
    item.caturl = meta[:caturl] || ''
    item.html = resp.body.deflate # compressed page HTML

    # ROBUSTNESS FIX: pages without the description iframe previously
    # crashed with NoMethodError on nil, aborting the whole run; store
    # a nil description instead.
    iframe = resp.parser.css('.productinfo > h2 + iframe').first
    item.desc = iframe ? @a.get(iframe.attributes['src'].value).body.deflate : nil

    item.save
    puts "Done"
    puts "--------------------------------------"
  end
end

# Install a Ctrl-C handler so the scrape loop can be aborted cleanly:
# the signal throws out of the catch block instead of raising Interrupt.
Signal.trap("SIGINT") { throw :ctrl_c }

catch :ctrl_c do
  Scrape.new.run
end
