begin
  require 'main/init'
rescue LoadError
  require 'src/main/ruby/main/init'
end



require 'test/unit'
require 'shoulda/integrations/test_unit'
require 'lib/crawling'

java_import [de.recipeminer.persistence.Db4oContainer]

# TODO: Needs update according to the API for database access (Db4oDescriptor...)
# Tests for Crawling::ChefkochCrawler: pure URL-construction behavior from the
# ChefkochAPIURLBuilder mixin, plus two slower tests that drive a full
# sequential crawl (one against a stub, one against the live chefkoch.de API
# and the Db4o persistence layer).
class TestChefkochCrawler < Test::Unit::TestCase
  context "#{Crawling::ChefkochCrawler}" do
    setup do
      @crawler = Crawling::ChefkochCrawler.new
    end

    context "using the #{Crawling::ChefkochAPIURLBuilder} module" do
      should "be able to construct a URL to retrieve a recipe" do
        expected_url = 'http://api.chefkoch.de/api/1.0/api-recipe.php' +
          '?ID=506631145837800'
        actual_url = @crawler.construct_recipe_url(506631145837800)
        assert_equal(expected_url, actual_url)
      end

      should "be able to construct a URL to sequentially query all available " +
        "recipes" do
        expected_url = 'http://api.chefkoch.de/api/1.0/api-recipe-search.php' +
          '?Suchbegriff=&start=30&limit=30'
        actual_url = @crawler.construct_sequential_search_url(30)
        assert_equal(expected_url, actual_url)
      end

      should "be able to convert a hash with default values into a param hash" +
        ", converting String keys into Symbol keys" do
        params = {:start => 15}
        defaults = {"limit" => 30}
        result = @crawler.merge_defaults_into_params(params, defaults)
        assert_equal({:start => 15, :limit => 30}, result)
      end
    end

    # Drives a full sequential crawl against the stub, which counts recipes in
    # memory instead of persisting them (see ChefkochCrawlerStubs below).
    should "be able to create 900 recipes urls from 30 query urls without errors" do
      @crawler = ChefkochCrawlerStubs::TestURLGenerationStub.new
      @crawler.sequential_crawl
    end

    # Integration test: hits the live chefkoch.de API and checks that the
    # crawled recipes actually landed in the Db4o store.
    should "be able to retrieve 120 recipes from the online api" do
      @crawler.sequential_crawl
      crawling_state = de.recipeminer.crawling.chefkoch.ChefkochCrawlingState.instance
      assert_true(crawling_state.crawled_count >= 120)
      session = Db4oContainer::DEFAULT.session
      begin
        obj_set = session.query(de.recipeminer.crawling.chefkoch.ChefkochJsonRecipe.java_class)
        assert_true(obj_set.size >= 120)
      ensure
        # Always release the Db4o session, even when an assertion fails.
        session.close
      end
    end
  end
end

# Test doubles used by TestChefkochCrawler to exercise URL generation and the
# sequential-crawl loop without touching the Db4o persistence layer.
module ChefkochCrawlerStubs
  # Crawler subclass whose sequential_crawl counts recipes in an in-memory
  # CrawlingStateStub instead of persisting them.
  class TestURLGenerationStub < Crawling::ChefkochCrawler
    # Walks the sequential search API page by page until 900 recipes have
    # been seen, advancing the query offset by the configured page size.
    def sequential_crawl
      state = CrawlingStateStub.new
      # Page size from the crawler configuration; fall back to the API
      # default of 30 when not configured.
      step = @conf[:'search-api'][:'query-param-defaults'][:limit] || 30
      puts "step: #{step}"
      while state.crawled_count < 900
        search_uri = construct_sequential_search_url(state.last_query_index)
        recipe_list = parse_json_from_uri(search_uri)['result']
        recipe_list.each do |recipe|
          state.recipe_crawled(recipe['RezeptShowID'])
        end
        state.last_query_index += step
      end
    end
  end

  # Minimal in-memory stand-in for the crawling state: tracks only how many
  # recipes have been crawled and the next query offset.
  class CrawlingStateStub
    attr_accessor :last_query_index
    attr_reader :crawled_count

    def initialize
      @crawled_count = 0
      @last_query_index = 0
    end

    # Records one crawled recipe; the show id itself is not stored.
    def recipe_crawled(_showid)
      @crawled_count += 1
    end
  end
end