require 'optparse'
require 'sqlite3'
require 'rubygems'
require 'active_record'
require 'mechanize'

# Command-line interface. Parsed switches are collected into the global
# $options hash, which the rest of the script reads.
$options = {}
parser = OptionParser.new("", 24) do |opts|
  opts.banner = "\nScraper 1.0\nAuthor: Louis (Skype: louisprm)\n\n"

  opts.on("-o", "--output FILE", "Output SQLite3 database file") { |v| $options[:output] = v }
  opts.on("-u", "--username USERNAME", "Username") { |v| $options[:username] = v }
  opts.on("-p", "--password PASSWORD", "Password") { |v| $options[:password] = v }
  opts.on("-s", "--search URL", "URL") { |v| $options[:search] = v }

  # Print the option summary followed by the long usage guide (see +help+).
  opts.on_tail("-h", "--help", "Displays this help") do
    puts opts, "", help
    exit
  end
end

# Returns the multi-line usage guide printed by the -h/--help switch.
# Fixes two typos in the user-facing text ("ony-by-one", "instance)to").
def help
  <<-eos

GUIDELINE
-------------------------------------------------------
The scraper package includes two scripts

  1. scrape.rb: scrape data from the internet and store to a local database file
  2. export.rb: read the local database and generate the Excel/CSV output

Procedures:

  1. Run the scrape script and store scraped data to local database file main.db

        ruby scrape.rb --username=your@email.com \\
                       --password=yourpasswd \\
                       --search="https://linkedin.com/s=John+Business+Analyst" \\
                       --output=main.db

  2. After the scraper script is done, run the export.rb script to read the main.db
     database and generate the Excel file data.xls

        ruby export.rb --input=main.db --output=/tmp/data.xls

Notes:

- The scrape.rb script supports resuming. Just run the script over and over again
  in case of any failure (due to internet connection problem for instance) to have
  it start from where it left off. Be sure to specify the same output database file
- As the scrape script stores items one-by-one, you can run the export script
  even when the scraping process is not complete yet. Then it will export available
  items in the local database

eos
end

# Parse command-line parameters into $options.
parser.parse!

# Validate required options. Errors go to stderr and exit with a non-zero
# status (the original printed to stdout and exited 0, which made failures
# indistinguishable from success in shell scripts).
{
  username: "username: -u",
  password: "password: -p",
  output: "output file: -o",
  search: "search URL: -s"
}.each do |key, hint|
  next unless $options[key].nil?

  warn "\nPlease specify #{hint}\n\n"
  exit 1
end

# Establish connection
# Open (or create) the SQLite database named by --output.
# NOTE(review): timeout is in milliseconds; presumably this is the SQLite
# busy timeout so the writer tolerates a concurrent reader (e.g. export.rb)
# — confirm against the sqlite3 adapter docs.
ActiveRecord::Base.establish_connection(
  adapter: 'sqlite3',
  database: $options[:output],
  timeout: 15000
)

require 'zlib'

# Convenience monkeypatches used by the scraper scripts.
class String
  # Compress the receiver with zlib; returns the compressed bytes.
  def deflate
    Zlib::Deflate.deflate(self)
  end

  # Decompress zlib-compressed bytes back into the original string.
  def inflate
    Zlib::Inflate.inflate(self)
  end

  # Strip invalid/unmappable byte sequences in place so the string is
  # valid UTF-8. Returns self.
  def fix
    self.encode!('UTF-8', :invalid => :replace, :undef => :replace, :replace => "")
  end
end

# Schema for the local scrape database: one +items+ table, one row per
# scraped profile. All columns are TEXT; +experiences+ holds a
# JSON-serialized array (see the Item model in this file).
# NOTE(review): subclassing ActiveRecord::Migration without a version tag
# (e.g. Migration[5.2]) raises on ActiveRecord >= 5 — confirm the pinned
# activerecord gem version.
class MySchema < ActiveRecord::Migration
  def change
    create_table :items do |t|
      t.text :url
      t.text :number
      t.text :name
      t.text :title
      t.text :industry
      t.text :location
      t.text :connections
      t.text :skills
      t.text :experiences
    end
  end
end

# Initialize the database schema on first run only, so re-running the script
# resumes against the existing file. File.exist? replaces File.exists?,
# which was deprecated and removed in Ruby 3.2.
MySchema.new.migrate(:change) unless File.exist?($options[:output])

# ActiveRecord model over the +items+ table created by MySchema.
# +experiences+ is stored as a JSON string and transparently
# (de)serialized to/from an Array of Hashes.
class Item < ActiveRecord::Base
  serialize :experiences, JSON
end

# Scrapes LinkedIn people-search results and stores each profile as an Item
# row. Logs in once on construction; scraping is resumable because get()
# skips profiles already present in the database.
class Scrape
  SITE = 'https://www.linkedin.com/'

  def initialize
    @a = Mechanize.new
    # NOTE(review): disabling TLS certificate verification is insecure;
    # kept only to preserve existing behavior.
    @a.agent.http.verify_mode = OpenSSL::SSL::VERIFY_NONE
    login
  end

  # Submit the login form with the credentials from -u/-p.
  # Returns true when the response contains a "Sign Out" link (logged in),
  # false otherwise.
  def login
    ps = @a.get(SITE).parser
    params = {
      "isJsEnabled" => "true",
      "source_app" => "",
      "tryCount" => "",
      "clickedSuggestion" => "false",
      "session_key" => $options[:username],
      # Fix: use the password supplied via -p. The original hard-coded a
      # literal password here, silently ignoring the --password option.
      "session_password" => $options[:password],
      "signin" => "Sign In",
      "session_redirect" => "",
      "trk" => "hb_signin",
      "loginCsrfParam" => ps.css('input[name=loginCsrfParam]').first.attributes['value'].value,
      "fromEmail" => "",
      "csrfToken" => ps.css('input[name=csrfToken]').first.attributes['value'].value,
      "sourceAlias" => ps.css('input[name=sourceAlias]').first.attributes['value'].value,
      # NOTE(review): the client_* values below look like constants captured
      # from a recorded session — confirm the endpoint still accepts them.
      "client_ts" => "1421051186573",
      "client_r" => "#{$options[:username]}:399857118:912002531:450679093",
      "client_output" => "-407836433",
      "client_n" => "399857118:912002531:450679093",
      "client_v" => "1.0.1"
    }

    ps = @a.post('https://www.linkedin.com/uas/login-submit', params, 'Content-Type' => 'application/x-www-form-urlencoded').parser

    if ps.css('body').inner_html[/Sign Out/]
      puts "Logged in"
      true
    else
      puts "Failed to login"
      false
    end
  end

  # Print the formatted result count for a keyword search (diagnostic only).
  def search_by_keyword(keyword)
    search_url = "https://www.linkedin.com/vsearch/f?type=all&keywords=#{keyword}&search=Search"
    ps = @a.get(search_url).parser
    puts ps.css('body').inner_html[/(?<=formattedResultCount...)[^"]+/]
  end

  # Walk up to 20 result pages of +search_url+, scraping every profile URL
  # found on each page. Stops early when a page yields no profile links.
  def search_by_url(search_url)
    1.upto(20) do |page|
      current_url =
        if search_url[/page_num/]
          # Fix: gsub's replacement must be a String — the original passed
          # the Integer +page+, which raises TypeError at runtime.
          search_url.gsub(/(?<=page_num=)[0-9]+/, page.to_s)
        else
          search_url.gsub(/&$/, '') + "&page_num=#{page}"
        end

      ps = @a.get(current_url).parser
      puts ps.css('body').inner_html[/(?<=formattedResultCount...)[^"]+/] if page == 1

      puts "****** #{current_url} *****"

      # @todo Only works for search people
      urls = ps.css('body').inner_html.scan(/(?<=link_nprofile_view_4...)[^"]+/)

      break if urls.empty?

      urls.each { |url| get(url) }
    end
  end

  # Scrape a single profile page and persist it as an Item. Profiles whose
  # numeric id already exists in the database are skipped, which is what
  # makes the whole run resumable.
  def get(url)
    puts "Scraping: #{url}"

    number = url[/(?<=view\?id=)[0-9]+/]

    if Item.exists?(number: number)
      puts "Already exists"
      return
    end

    ps = @a.get(url).parser

    item = Item.new
    item.url = url
    item.number = number
    item.name = ps.css('span[class=full-name]').text.strip
    item.title = ps.css('#headline p.title').first.text.strip if ps.css('#headline p.title').first
    item.industry = ps.css('#location a[name=industry]').first ? ps.css('#location a[name=industry]').first.text : "n/a"
    item.location = ps.css('#location a[name=location]').first.text if ps.css('#location a[name=location]').first
    item.connections = ps.css('div[class="member-connections"]').text.strip
    item.skills = ps.css('ul[class="skills-section"]').first.css('> li').map{|li| "#{li.css('> span > a').text.strip} - #{li.css('> span > span > a').text.strip }" }.join("; ") if ps.css('ul[class="skills-section"]').first
    item.experiences = ps.css('div#background-experience > div').map{|div|
      {
        title: div.css('header h5').text,
        company: div.css('header h4').text,
        date: div.css('span.experience-date-locale time').map{|t| t.text.strip }.join(" "),
        locale: div.css('span.experience-date-locale span.locality').text.strip
      }
    }

    item.save!

    puts "DONE"
  end
end

# Trap Ctrl-C so a keyboard interrupt unwinds cleanly out of the scrape
# loop (via throw/catch) instead of dumping a backtrace.
trap("SIGINT") { throw :ctrl_c }

catch :ctrl_c do
  scraper = Scrape.new
  #scraper.search_by_keyword('Technology')
  scraper.search_by_url($options[:search])
end
