require "open-uri"
require "timeout"
class Sport < ActiveRecord::Base
  belongs_to :country
  has_many   :leagues
  has_many   :matches
  has_many   :teams

  # Fetch match data from the handicap (sportspunter) feed for every league
  # cid belonging to this sport, archive the raw XML under tmp/, and create
  # the corresponding League/Match records.
  #
  # Returns Match.count (the global total) so class-level callers can sum it.
  def fetch_matches
    leagues.collect(&:cid).compact.uniq.each do |cid|
      username = SETTINGS["feed"]["username"]
      password = SETTINGS["feed"]["password"]
      url = "http://xml.sportspunter.com/xml?username=#{username}&password=#{password}&format=xml&cid=#{cid}"
      # Only ask for updates since the last successful fetch (feed wants ms).
      last_fetched_at = FetchLog.last_fetched_at
      url += "&time=#{last_fetched_at.to_i * 1000}" if last_fetched_at
      puts "fetch from url #{url}"
      content = nil
      begin
        # Kernel#timeout is deprecated; call the module method explicitly.
        Timeout.timeout(15) do
          # OpenURI.open_uri instead of Kernel#open: works on all Ruby
          # versions and cannot be tricked into opening a local file/pipe.
          content = OpenURI.open_uri(url) { |f| f.read }
        end
      rescue Timeout::Error
        # Skip only this cid — other leagues may still be reachable.
        puts "fetch time out. skip."
        next
      rescue StandardError => e
        # StandardError, not Exception: let signals and exits propagate.
        puts "unexpected exception:"
        puts e.message
        next
      end
      # Keep a raw copy of every feed response for debugging/audit purposes.
      dir = File.join(Rails.root, "tmp", "sportspunter")
      FileUtils.mkdir_p(dir) unless File.exist?(dir)
      File.open(File.join(dir, "xml-#{cid}-#{Time.now.to_i}.xml"), "w+") do |file|
        file.write(content)
      end
      hash = Hash.from_xml(content)
      begin
        # A single competition parses as a Hash, several as an Array —
        # wrap-and-flatten normalizes both shapes to an Array.
        competitions = [hash["SPORTSPUNTER"]["competition"]].flatten
      rescue StandardError
        puts "data invalid: #{hash.inspect}"
        competitions = []
      end
      competitions.each do |competition|
        next unless competition && competition["matches"] && competition["matches"]["match"]
        league = League.from_hash(competition, self)
        matches = [competition["matches"]["match"]].flatten
        matches.each do |match|
          m = Match.from_hash(match, league)
          puts "#{m.home_team.name} vs #{m.away_team.name}" if m
        end
      end
    end
    FetchLog.update_last_fetched_at
    Match.count
  end

  # Determine match results from the live-score RSS feeds: for each "FT"
  # (full time) item, resolve the two teams and — when the item corresponds
  # to the next unplayed match — record its score.
  #
  # Returns the array of Match records that were played during this run.
  def self.fetch_results
    urls = [
      "http://www.scorespro.com/rss/live-soccer.xml",
      "http://www.scorespro.com/rss/live-basketball.xml"
    ]
    next_match = Match.next_match
    matches = []
    urls.each do |url|
      content = OpenURI.open_uri(url) do |file|
        file.read
      end
      data = Hash.from_xml(content)
      # Merge previously-seen feed items so results are kept between polls.
      data = append_cached_data(data, url)

      data["rss"]["channel"]["item"].each do |item|
        title = item["title"]
        # e.g. "FT TeamA - TeamB 2 - 1 ..." => names and points captured.
        regexp = /^FT\s*(.+?)\s*\-\s*(.+?)\s*(\d{1,3})\s*\-\s*(\d{1,3}).+?$/
        next unless title =~ regexp
        home_team_name, away_team_name, home_point, away_point = $1, $2, $3, $4
        puts "#{home_team_name},#{away_team_name},#{home_point},#{away_point}"

        # Prefer the curated name alias; fall back to the raw feed name.
        home_team = Team.find_by_name_alias(home_team_name) || Team.find_by_name(home_team_name)
        away_team = Team.find_by_name_alias(away_team_name) || Team.find_by_name(away_team_name)
        next unless home_team && away_team

        # A lookup succeeded: flag each team so the alias is trusted later.
        unless home_team.alias_done
          home_team.alias_done = true
          home_team.save
        end
        unless away_team.alias_done
          # BUG FIX: the original flagged and saved home_team here, so the
          # away team's alias_done flag was never persisted.
          away_team.alias_done = true
          away_team.save
        end

        # Only process the match if it exists and is still unplayed.
        match = Match.find_by_home_team_id_and_away_team_id_and_played(home_team.id, away_team.id, false)
        next unless match && next_match == match
        # Found the next match: record its result and advance the cursor.
        next_match.play_from_point(home_point, away_point)
        next_match = Match.next_match
        matches << match
      end
    end
    matches
  end

  # Fetch matches for every sport and return the summed per-sport results.
  # NOTE(review): Sport#fetch_matches returns the global Match.count, so this
  # sum over-counts when there is more than one sport — confirm intent.
  def self.fetch_matches
    matches_count = 0
    Sport.all.each { |sport| matches_count += sport.fetch_matches.to_i }
    matches_count
  end

  # Retain part of the match data for log purposes: merge the per-URL cached
  # feed items (tmp/cached_feeds_<md5>.yml) into +data+, cap the item list,
  # and rewrite the cache file. Returns the (mutated) +data+ hash.
  def self.append_cached_data(data, url)
    url_hash = Digest::MD5.hexdigest(url)
    cache_file = File.join(Rails.root, "tmp", "cached_feeds_#{url_hash}.yml")
    begin
      # BUG FIX: YAML.load_file expects a *path*; the original passed the
      # file's contents (File.read), which always raised and threw the
      # cache away on every call.
      cached_items = YAML.load_file(cache_file)
    rescue StandardError
      # Missing or corrupt cache file: start with an empty cache.
      cached_items = nil
    ensure
      cached_items ||= []
      cached_items.each do |item|
        data["rss"]["channel"]["item"].unshift(item)
        data["rss"]["channel"]["item"].uniq!
        # Cap the cache at 500 items so the file cannot grow without bound.
        data["rss"]["channel"]["item"].pop if data["rss"]["channel"]["item"].size > 500
      end
      # Persist the merged item list as the new cache.
      File.open(cache_file, "w+") do |file|
        file.write data["rss"]["channel"]["item"].to_yaml
      end
    end
    data
  end

  # True when this sport's name is "football" (case-insensitive).
  def football?
    !!(name =~ /^football$/i)
  end

  # True when this sport's name is "basketball" (case-insensitive).
  def basketball?
    !!(name =~ /^basketball$/i)
  end

=begin
  def self.fetch_results
    Sport.all.each{|sport| sport.fetch_results}
  end
=end

end
