#encoding: utf-8
require 'lib/configuration'
require 'net/http'
require 'json'
require 'set'
require 'pp'
require 'yaml'
require 'singleton'

java_import [de.recipeminer.environments.ExecutionContextProvider,
             de.recipeminer.environments.ExecutionContext,
             de.recipeminer.crawling.chefkoch.ChefkochCrawlingState,
             de.recipeminer.crawling.chefkoch.ChefkochJsonRecipe,
             de.recipeminer.crawling.chefkoch.ChefkochJsonConverter]

java_import(org.apache.log4j.Logger) { 'Log4J' }

module Crawling
  module URLBuilder

    # Replaces insertion marks such as §param§ in +url_template+ with values
    # taken from the +params+ hash, falling back to +defaults+ for keys that
    # are missing from +params+.
    #
    # url_template::    string containing zero or more delimited placeholders
    # params::          hash mapping symbol keys to substitution values
    # defaults::        fallback values (string keys allowed, they are
    #                   symbolised before merging)
    # param_delimiter:: character surrounding a placeholder ('§' by default).
    #                   NOTE: this argument was previously accepted but
    #                   ignored — the regex hardcoded '§'; it is honoured now.
    #
    # Errors are not propagated: on a missing substitution a message is
    # printed and the partially substituted URL is returned.
    def construct_query_uri(url_template, params,
        defaults = {}, param_delimiter = '§')
      # Skip the merge for empty defaults; it would be a no-op anyway.
      params = merge_defaults_into_params(params, defaults) unless defaults.empty?
      url = url_template.dup
      delim = Regexp.escape(param_delimiter)
      pattern = /#{delim}(.+?)#{delim}/
      begin
        while pattern =~ url
          key = $1
          url.gsub!($&) do
            insertion = params[key.to_sym]
            unless insertion
              raise "Substitution for #{key} unknown in #{params}"
            end
            insertion
          end
        end
      rescue StandardError => e
        puts 'Error constructing query url:'
        puts e.inspect
      end
      url
    end

    # Merges +params+ over +defaults+ after normalising the defaults' string
    # keys to symbols (via the Hash extension from lib/configuration); entries
    # in +params+ win on key collisions.
    def merge_defaults_into_params(params, defaults)
      defaults.str_keys_to_sym_keys.merge(params)
    end
  end

  # URL construction helpers specific to the chefkoch.de search and recipe
  # APIs. Expects the including class to expose the chefkoch configuration
  # hash in @conf.
  module ChefkochAPIURLBuilder
    include URLBuilder

    # Builds the search URI listing recipes beginning at result index +start+.
    def construct_sequential_search_uri(start)
      search_conf = @conf[:'search-api']
      template = search_conf[:'base-url'] + search_conf[:'format']
      construct_query_uri(template, {:start => start},
                          search_conf[:'query-param-defaults'])
    end

    # Builds the URI returning the JSON serialisation of a single recipe.
    def construct_recipe_uri(showid)
      recipe_conf = @conf[:'recipe-api']
      template = recipe_conf[:'base-url'] + recipe_conf[:'format']
      construct_query_uri(template, {:showid => showid.to_s})
    end
  end

  class ChefkochCrawler
    include ChefkochAPIURLBuilder

    # Loads the chefkoch crawling configuration and opens the db4o object
    # container used to persist crawled recipes.
    #
    # Raises if the configuration lacks a :'uuid-prefix' entry, which is
    # required to build the IDs of stored ChefkochJsonRecipe objects.
    def initialize
      @conf = Configuration::ConfigContainer.instance[:crawling][:chefkoch]
      @uuid_prefix = @conf[:'uuid-prefix']
      raise "UUID prefix not found" if @uuid_prefix.nil?
      @db4o = ExecutionContextProvider.current_container # Java-side db4o container
    end
    # Systematically crawls JSON-serialisations of the recipes from www.chefkoch.de.
    # Sends search requests for +step+ recipes at a time (limit from config,
    # default 30), extracts the showids from each response and downloads the
    # JSON-serialisation of every recipe behind them.
    #
    # opts:: may specify :max_recipe_count and :max_additional_recipes (both
    #        default to 10000) to stop the crawl once either limit is reached.
    #        Otherwise crawling continues until a search request yields no
    #        further results (which may take a very long time).
    #
    # Along with the ChefkochJsonRecipe objects, the current crawling state is
    # saved into the database, so an interrupted crawl resumes at the last
    # committed search index.
    def sequential_crawl(opts)
      opts = {:max_recipe_count => 10000, :max_additional_recipes => 10000}.update(opts)
      state = ChefkochCrawlingState.instance
      progress_observer = CrawlingProgressObserver.new(state, opts)
      step = @conf[:'search-api'][:'query-param-defaults'][:limit] || 30
      last_status_msg = 0
      begin
        until progress_observer.limit_reached?(state)
          json_recipes_cache = []
          search_uri = nil
          begin
            search_uri = construct_sequential_search_uri(state.last_query_index)
            search_response = fetch_response_body(search_uri)
            recipe_list = JSON.parse(search_response)['result']
          rescue StandardError => e
            # The whole search page failed, so no individual showid can be
            # blamed — just skip this window. (The previous code referenced an
            # undefined `showid` here, raising NameError and aborting the crawl.)
            puts "Fetch or parse of ShowIds from #{search_uri} failed:\n#{e.inspect}"
            state.last_query_index += step
            next
          end
          break if recipe_list.empty?
          recipe_list.each do |recipe|
            id = recipe['RezeptID'].to_i
            showid = recipe['RezeptShowID']
            begin
              unless state.getCrawledShowIDs.contains(showid) # do not save duplicates
                json_recipes_cache << ChefkochJsonRecipe.new("#@uuid_prefix#{id}", fetch_recipe(showid))
                download_delay()
              end
            rescue StandardError => e
              puts "Fetch or creation of raw recipe #{id} failed:\n#{e.inspect}"
              state.recipe_download_failed(showid, e.inspect)
            else
              # Only mark as crawled when fetch/creation succeeded (or it was
              # already known and skipped as a duplicate).
              state.recipe_crawled(showid)
            end
          end
          puts "Saving JSON recipes in cache (#{json_recipes_cache.size})"
          begin
            json_recipes_cache.each { |recipe| @db4o.store(recipe) }
            state.last_query_index += step
            state.db4o_companion.save
            @db4o.commit
          rescue StandardError => e
            puts "Storing of recipe cache or update of crawling state into the database failed:\n#{e.inspect}"
            @db4o.rollback
          end
          # Emit a status line roughly every 1000 saved recipes.
          if (state.crawled_count / 1000) > last_status_msg
            puts "#{state.crawled_count} JSON recipes saved"
            last_status_msg = state.crawled_count / 1000
          end
        end
      ensure
        @db4o.rollback # discard any uncommitted partial batch before closing
        @db4o.close
      end
    end

    # Downloads the single recipe identified by +show_id+, stores it under the
    # configured UUID prefix + +id+ and records the crawl in the persistent
    # crawling state.
    #
    # Commits on success; on any failure the transaction is rolled back and
    # the exception re-raised. The db4o container is closed in every case
    # (previously a failing commit left it open), so this instance cannot be
    # reused afterwards.
    def single_download(id, show_id)
      recipe_filtered_json = fetch_recipe(show_id)
      puts "filtered recipe class: #{recipe_filtered_json.class}"  #(Java: #{recipe_filtered_json.java_class})"
      puts "filtered recipe encoding: #{recipe_filtered_json.encoding}"
      @db4o.store(ChefkochJsonRecipe.new("#@uuid_prefix#{id}", recipe_filtered_json))
      state = ChefkochCrawlingState.instance
      state.recipe_crawled(show_id.to_s)
      state.db4o_companion.save
      @db4o.commit
    rescue Exception
      # Intentionally broad: roll back on *any* failure (incl. interrupts) and
      # re-raise — nothing is swallowed here.
      @db4o.rollback
      raise
    ensure
      @db4o.close
    end

    # Sleeps for a random duration between 0.5 and 1.4 seconds to throttle
    # requests against the chefkoch.de servers.
    def download_delay
      jitter = (rand(10) - 5) * 0.1
      sleep(1 + jitter)
    end

    # Fetches the raw JSON document for +showid+ from the recipe API and
    # reduces it to the filtered recipe representation used for storage.
    def fetch_recipe(showid)
      raw_json = fetch_response_body(construct_recipe_uri(showid))
      ChefkochJsonConverter.extract_and_filter_json_recipe(raw_json)
    end

    # Performs a GET request for +uri+ (String or URI) and returns the
    # response body. Raises unless the server answered with 200 OK.
    def fetch_response_body(uri)
      # Kernel#URI accepts both Strings and URI instances. The previous
      # `instance_of?(URI)` check was always false (URI is a module, and
      # parsed URIs are URI::HTTP etc.), so URI arguments got re-parsed
      # and crashed.
      uri = URI(uri)
      response = Net::HTTP.get_response(uri)
      # HTTPResponse#header is an obsolete alias returning self; check the
      # response object directly.
      unless response.is_a? Net::HTTPOK
        raise "GET request for #{uri} yielded no OK response (got #{response.header.class} instead)."
      end
      response.body
    end

    # Tracks crawl progress against the optional :max_recipe_count and
    # :max_additional_recipes limits given to sequential_crawl.
    class CrawlingProgressObserver
      # crawling_state:: must respond to #crawled_count
      # opts::           hash with optional :max_recipe_count (absolute limit)
      #                  and :max_additional_recipes (limit relative to the
      #                  count at construction time); a nil/absent entry
      #                  disables the respective limit
      def initialize(crawling_state, opts)
        @init_recipe_count = crawling_state.crawled_count
        @max_recipe_count = opts[:max_recipe_count]
        @max_additional_recipes = opts[:max_additional_recipes]
      end

      # Returns true when either configured limit has been reached for the
      # given crawling state, false otherwise.
      def limit_reached?(crawling_state)
        current = crawling_state.crawled_count
        return true if @max_recipe_count && current >= @max_recipe_count
        return true if @max_additional_recipes &&
                       (current - @init_recipe_count) >= @max_additional_recipes
        false
      end
    end
  end
end
