require "rubygems"
require "eventmachine"
require "memcache"
require "logger"
require 'activesupport' unless ({}.respond_to? :to_json)

require File.join(File.dirname(__FILE__),'lpm.rb')

class Xiaoqian

  # Layout of the data kept in memcache:
  #
  # QNYH namespace (shared by all hosts):
  #   XQList => IPs of hosts running a Xiaoqian server, e.g.
  #     ['10.2.226.37', '10.2.226.38', '10.2.226.39']
  #
  # XQ<local_ip> namespace (one per host):
  #   :localhost => this host's IP
  #   :pids      => pids to monitor, e.g. [12345, 23456, 34567]
  #   plus the periodically refreshed :host_proc_* / :host_process_grid keys.
  #
  # Based on the pids stored in memcache, the Xiaoqian server periodically
  # refreshes its {pid => LPM::Process} hash.
  #
  # caches => ['10.2.226.38:11211','10.2.226.38:11212','10.2.226.39:11211']
  # caches => ['127.0.0.1:11011','127.0.0.1:11012']
  # Xiaoqian.new(15,{:pids=>[28261,28263],:caches=>['10.2.226.133:11011','10.2.226.133:11012']})
  #
  # timer - polling interval in seconds for the periodic jobs.
  # opts  - :pids   => initial pids to monitor
  #         :caches => memcache "host:port" server list
  #         :log    => Logger instance shared by all jobs
  def initialize(timer,opts = {:pids=>[],:caches=>[]})
    @local_ip = get_local_ip
    @timer = timer
    @logger = opts[:log]

    # Two views of the same memcache servers: a per-host namespace for this
    # server's own data and the shared "QNYH" namespace for the host registry.
    # @XQcaches = get_cache_clients(caches)
    @XQcaches = MemCache.new(opts[:caches],:namespace=>"XQ#{@local_ip}")
    @QNYHcaches = MemCache.new(opts[:caches],:namespace=>"QNYH")

    @process = get_processes_instance(opts[:pids])
    init_cache_config(opts[:pids],@local_ip)
  end

  # Publish this host's identity and monitored pids into its own namespace,
  # then register the host in the shared XQList.
  def init_cache_config(pids,local_ip)
    # load File.join(File.dirname(__FILE__),'config','stipulation.rb')
    @XQcaches.set :localhost,local_ip
    @XQcaches.set :pids,pids

    register_QNYH_XQList
  end

  # Add this host's IP to the shared :XQList entry if it is not already there.
  # NOTE(review): read-modify-write is not atomic — two hosts registering at
  # the same time can lose an update. Returns the list (including this host).
  def register_QNYH_XQList
    list = @QNYHcaches.get(:XQList) || []
    unless list.include?(@local_ip) then
      list << @local_ip
      @QNYHcaches.set(:XQList,list)
    end
    list
  end

  # Resolve this machine's primary IP address from its hostname.
  def get_local_ip
    require 'socket'
    IPSocket.getaddress(Socket.gethostname)
  end

  # Reconcile the {pid => LPM::Process} hash with the authoritative pid list:
  # instantiate entries for new pids, drop entries whose pid is gone.
  # proIns => {12345 => <LPM::Process>, 67890 => <LPM::Process>}
  # Mutates and returns proIns. A nil pid list (cold cache) is treated as [].
  def update_processes_instance(proIns={},pids=[])
    pids ||= []  # @XQcaches.get(:pids) may return nil before init completes
    keys = proIns.keys
    pids.each do |pid|
      begin
        proIns[pid] = LPM::Process.new(pid) unless keys.include?(pid)
      rescue StandardError => e
        # A pid may exit between being listed and being inspected; log and go on.
        @logger.error "update_processes_instance(#{proIns.inspect},#{pids.inspect})"
        @logger.error e
      end
    end
    keys = proIns.keys
    (keys - pids).each{ |var| proIns.delete var }
    proIns
  end

  # Build a fresh {pid => LPM::Process} hash; pids that cannot be inspected
  # are logged and skipped. A nil pid list yields an empty hash.
  def get_processes_instance(pids=[])
    h = {}
    if pids then
      pids.each{ |pid|
        begin
          h[pid] = LPM::Process.new(pid)
        rescue StandardError => e
          @logger.error "get_processes_instance(#{pids.inspect})"
          @logger.error e
        end
      }
    end
    h
  end

  # Unused alternative backend: one EventMachine memcache connection per
  # "host[:port]" entry (see the commented-out call in #initialize).
  def get_cache_clients(caches=[])

    # EM::P::Memcache.connect =>
    # # Connect to a memcached server (must support NOREPLY, memcached >= 1.2.4)
    # def self.connect host = 'localhost', port = 11211
    #   EM.connect host, port, self, host, port
    # end
    caches.inject([]){|arr,ca|
      host,port = ca.split(':')
      if port then
        arr << EM::P::Memcache.connect(host,port.to_i)
      else
        arr << EM::P::Memcache.connect(host)
      end
    }
  end

  # Stamp a payload with the current epoch second so readers can tell how
  # fresh it is.
  def wrap_data(data)
    {:ts => Time.now.to_i,:data => data}
  end

  # Data from Job
  # => {:ts=>123456789,:data=><Hash>}

  # Per-process job: every `timer` seconds refresh the LPM::Process set from
  # the :pids list in memcache and publish one grid row per process.
  # Starts an EventMachine reactor, so this call blocks until the reactor stops.
  def process_job(timer)
    # Logged at error level on purpose: lifecycle messages must show up
    # regardless of the configured log level.
    @logger.error "[Pid => #{Process.pid}] -- Process Job start."
    EM.run{
      EM.add_periodic_timer(timer){
        @logger.info "process_job is called."
        @process = update_processes_instance(@process,@XQcaches.get(:pids))
        @logger.info "@process => #{@process.inspect}."
        r = []
        t = []

        @process.each do |key,ins|
          tmp = {}
          # :cpu is gathered in a background thread — presumably LPM::Process#cpu
          # samples over time and is slow; TODO confirm against lpm.rb.
          t << Thread.fork{
            tmp.update({:cpu => ins.cpu})
          }
          tmp.update({
            :pid => ins.pid,
            :cwd => ins.cwd,
            :exe => ins.exe,
            :root => ins.root,
            :cmdline => ins.cmdline,
            :memory => ins.mem
          })
          r << tmp
        end
        t.each{|x|x.join}
        @logger.info "@process result => #{r.inspect}"
        @XQcaches.set :host_process_grid,wrap_data({:root => r,:total => r.size}.to_json)
      }
    }
  end

  # LPM::Proc or LPM::Process data will be wraped in json format.
  # This format must be fit to the require of LPMP's javascript.

  # Host-level job: every `timer` seconds publish cpu, loadavg, meminfo, df,
  # the pid list, and a summary grid for this host.
  # Starts an EventMachine reactor, so this call blocks until the reactor stops.
  def proc_job(timer)
    @logger.error "[Pid => #{Process.pid}] -- Proc Job start."
    EM.run{
      # cpu
      EM.add_periodic_timer(timer){
        @logger.debug "proc_job's cpu is called."
        @XQcaches.set :host_proc_cpu,wrap_data({:value => LPM::Proc.cpu[:cpu].to_f,:time => Time.now.strftime("%m-%d %Hh:%Mm:%Ss")}.to_json)
      }
      # loadavg
      EM.add_periodic_timer(timer){
        @logger.debug "proc_job's loadavg is called."
        @XQcaches.set :host_proc_loadavg,wrap_data({:value => LPM::Proc.loadavg[:load][0].to_f,:time => Time.now.strftime("%m-%d %Hh:%Mm:%Ss")}.to_json)
      }
      # meminfo
      EM.add_periodic_timer(timer){
        @logger.debug "proc_job's meminfo is called."
        @XQcaches.set :host_proc_meminfo,wrap_data({:root => LPM::Proc.meminfo.inject([]){|a,e|
          a << {:type => e[0],:count => e[1]}
        },:time => Time.now.strftime("%m-%d %Hh:%Mm:%Ss")}.to_json)
      }
      # df
      EM.add_periodic_timer(timer){
        @logger.debug "proc_job's df is called."
        @XQcaches.set :host_proc_df,wrap_data({:root => LPM::Proc.df.inject([]){|a,e|
          a << {:type => e[0],:count => e[1]}
        },:time => Time.now.strftime("%m-%d %Hh:%Mm:%Ss")}.to_json)
      }
      # pids
      EM.add_periodic_timer(timer){
        @logger.debug "proc_job's pids is called."
        @XQcaches.set :host_proc_pids,wrap_data(LPM::Proc.pids.map{ |e| {:text => e[:cmdline],:leaf => true,:cls => "file",:qtip => e[:pid]} }.to_json)
      }

      # grid
      EM.add_periodic_timer(timer){
        @logger.info "proc_job's grid is called."
        tmp = {}
        # NOTE(review): string key "cpu" here, symbol :cpu in the cpu timer
        # above — looks inconsistent; verify which key LPM::Proc.cpu exposes.
        t = Thread.fork{
          tmp.update({:cpu => LPM::Proc.cpu["cpu"]})
        }
        tmp.update({
          :ip => @local_ip,
          :hostname => LPM::Proc.hostname,
          :loadavg => LPM::Proc.loadavg[:load][0],
          :mem_total => LPM::Proc.meminfo[:MemTotal],#.chomp(' kB'),
          :mem_free => LPM::Proc.meminfo[:MemFree],#.chomp(' kB'),
          :uptime_total => LPM::Proc.uptime[:total],
          :uptime_idle => LPM::Proc.uptime[:idle],
          :disk_size => LPM::Proc.df[:size],
          :disk_use => LPM::Proc.df[:use],
          :date => LPM::Proc.date
        })
        t.join
        @logger.info "proc_grid => #{tmp.inspect}."
        @XQcaches.set :host_proc_grid,wrap_data(tmp)
      }
    }
  end

  # Fork one child process for proc_job and one for process_job, then block
  # in the parent until both children exit.
  def work_job
    timer = @timer
    @logger.error "work_job will run,and interval timer is #{timer}."
    p = []
    p << Process.fork{
      begin
        self.proc_job(timer)
      rescue StandardError => e
        @logger.fatal "Proc Job fatal error!"
        @logger.fatal e.to_s
      end
    }
    # In the parent, fork returns the child's pid (truthy); start the second
    # job only if the first child was created.
    if p[0] then
      p << Process.fork{
        begin
          self.process_job(timer)
        rescue StandardError => e
          @logger.fatal "Process Job fatal error!"
          @logger.fatal e.to_s
        end
      }
    end
    @logger.error "work_job is running."
    @logger.error "process and proc job's pid => #{p.inspect}."
    Process.waitall
    @logger.error "work_job shutdown."
  end

  # Convenience entry point: build a Xiaoqian and run its jobs, logging any
  # startup failure (via opts[:log]) instead of raising.
  def self.run(timer,opts = {:pids=>[],:caches=>[]})
    begin
      # Daemon.start{Xiaoqian.new(timer,{:pids=>opts[:pids],:caches=>opts[:caches],:log=>opts[:log]}).work_job}
      Xiaoqian.new(timer,{:pids=>opts[:pids],:caches=>opts[:caches],:log=>opts[:log]}).work_job
    rescue StandardError => e
      opts[:log].error e
      opts[:log].error "Xiaoqian.new(#{timer},#{{:pids=>opts[:pids],:caches=>opts[:caches],:log=>opts[:log]}}).work_job"
    end
  end
end