begin
  require 'main/init'
rescue LoadError
  require 'src/main/ruby/main/init'
end

require 'pathname'
require 'test/unit'
require 'shoulda/integrations/test_unit'
require 'lib/configuration'
require 'lib/crawling'

java_import [de.recipeminer.environments.ExecutionContextProvider,
             de.recipeminer.environments.ExecutionContext,
             de.recipeminer.persistence.Db4oHelpers,
             de.recipeminer.crawling.chefkoch.ChefkochCrawlingState,
             de.recipeminer.crawling.chefkoch.ChefkochJsonRecipe]


# TODO: needs update according to the API for database access (Db4oDescriptor...)
#
# Tests for Crawling::ChefkochCrawler: URL construction (offline),
# stubbed sequential crawling (offline) and, when GO_ONLINE is true,
# an integration test against the live Chefkoch REST API.
class TestChefkochCrawler < Test::Unit::TestCase

  # Toggle for the live-API test below; set to false to run offline only.
  GO_ONLINE = true

  context "#{Crawling::ChefkochCrawler}" do
    setup do
      @crawler = Crawling::ChefkochCrawler.new
      # Every example starts from a cleared test execution context.
      ExecutionContextProvider.setClearTest
    end

    context "using the #{Crawling::ChefkochAPIURLBuilder} module" do
      should "be able to construct a URL to retrieve a recipe" do
        expected_uri = 'http://api.chefkoch.de/api/1.0/api-recipe.php' +
            '?ID=506631145837800'
        actual_uri = @crawler.construct_recipe_uri(506631145837800)
        assert_equal(expected_uri, actual_uri)
      end

      should "be able to construct a URL to sequentially query all available " +
                 "recipes" do
        expected_uri = 'http://api.chefkoch.de/api/1.0/api-recipe-search.php' +
            '?o=4&start=30&limit=30'
        actual_uri = @crawler.construct_sequential_search_uri(30)
        assert_equal(expected_uri, actual_uri)
      end

      should "be able to convert a hash with default values into a param hash" +
                 ", converting String keys into Symbol keys" do
        params = {:start => 15}
        defaults = {"limit" => 30}
        result = @crawler.merge_defaults_into_params(params, defaults)
        assert_equal({:start => 15, :limit => 30}, result)
      end
    end

    should "be able to create 900 recipes urls from 30 query urls without errors" do
      # The stub serves recorded responses, so this runs without HTTP traffic.
      @crawler = ChefkochCrawlerStubs::TestURLGenerationStub.new
      @crawler.sequential_crawl
    end

    if GO_ONLINE
      should "be able to retrieve 60 recipes from the online api" do
        container = Db4oHelpers.current_database
        container.clear
        begin
          crawling_state = ChefkochCrawlingState.instance
          # Randomize the crawling start to make crawling tests less
          # conspicuous to the REST-service provider.
          crawling_state.last_query_index = (rand(100) + 42)*30
          @crawler.sequential_crawl :max_recipe_count => 60

          assert_equal(60, crawling_state.crawled_count,
                       "Crawling state knows about 60 raw recipes")
          obj_set = container.query(ChefkochJsonRecipe.java_class)
          assert_equal(60, obj_set.size,
                       'There should be 60 raw recipes in the database')
        ensure
          # Always leave the shared test database empty and closed,
          # even when an assertion above fails.
          container.clear
          container.close
        end
      end
    end
  end
end

# Test doubles for the ChefkochCrawler tests: a crawler subclass that serves
# recorded API responses from disk, and a lightweight in-memory crawling state
# that never touches the database.
module ChefkochCrawlerStubs
  # Crawler stub exercising URL generation against canned HTTP responses.
  # A real HTTP request happens at most once per fixture file (to record it);
  # afterwards every response is read from disk.
  class TestURLGenerationStub < Crawling::ChefkochCrawler
    # Walks the sequential search API until +target_count+ recipes have been
    # registered with the stubbed crawling state. The count is a parameter
    # (default 900, the original hard-coded value) so shorter crawls are
    # possible without changing callers.
    def sequential_crawl(target_count = 900)
      state = CrawlingStateStub.new
      # Page size comes from the crawler configuration; fall back to the
      # API default of 30 recipes per query.
      step = @conf[:'search-api'][:'query-param-defaults'][:limit] || 30
      while state.crawled_count < target_count
        search_uri = construct_sequential_search_uri(state.last_query_index)
        search_response = fetch_response_body(search_uri)
        recipe_list = JSON.parse(search_response)['result']
        recipe_list.each { |recipe| state.recipe_crawled(recipe['RezeptShowID']) }
        state.last_query_index += step
      end
    end

    # Keep a handle on the real, inherited downloader BEFORE overriding
    # fetch_response_body below; read_from_file_or_download falls back to it
    # when a fixture file does not exist yet.
    alias_method :download_response_body, :fetch_response_body

    # Serves a recorded response chosen by the request URI. Regexp
    # metacharacters ('.', '?') are escaped and the patterns anchored with \A,
    # so the recipe pattern can never accidentally match a recipe-search URI.
    def fetch_response_body(uri)
      case uri
        when %r{\Ahttp://api\.chefkoch\.de/api/1\.0/api-recipe-search\.php\?}
          read_from_file_or_download(uri, 'search_response.json')
        when %r{\Ahttp://api\.chefkoch\.de/api/1\.0/api-recipe\.php\?}
          read_from_file_or_download(uri, 'recipe_response.json')
      end
    end

    private

    # Returns the cached response stored in +filename+; on a cache miss the
    # body is downloaded once and written to disk for later runs.
    def read_from_file_or_download(uri, filename)
      response_file = Pathname(File.join(stub_data_path, filename))
      unless response_file.exist?
        puts "Downloading and saving stub response for: #{uri}"
        response = download_response_body(uri)
        # File.open (not Kernel#open): avoids command execution on
        # '|'-prefixed paths and closes the handle via the block form.
        File.open(response_file.to_s, 'w:utf-8') { |f| f.write(response) }
        return response
      end
      File.open(response_file.to_s, 'r:utf-8') { |f| f.read }
    end

    # Directory holding the recorded API responses, located next to the
    # test db4o database resource. Memoized per instance.
    def stub_data_path
      @stub_path ||= begin
        db_path = Pathname(Configuration.resource('recipeminer_test.db4o')).parent
        File.join(db_path, 'stub_data')
      end
    end
  end

  # Minimal stand-in for ChefkochCrawlingState: counts crawled recipes and
  # tracks the sequential query offset in memory only.
  class CrawlingStateStub
    attr_accessor :last_query_index
    attr_reader :crawled_count

    def initialize
      @crawled_count = 0
      @last_query_index = 0
    end

    # The stub only counts; the real state would also record +showid+.
    def recipe_crawled(showid)
      @crawled_count += 1
    end
  end
end