/*
 * Copyright 2009 by Konstantin Bulenkov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.bulenkov.crawler;

import com.bulenkov.crawler.model.CrawlerModel;
import com.bulenkov.crawler.util.Downloader;
import com.bulenkov.crawler.util.Page;
import com.bulenkov.crawler.util.UrlParser;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author Konstantin Bulenkov (konstantin@bulenkov.com)
 */
public class CrawlerThread extends Thread {
  /**
   * Per-thread-group counters used by {@link #generateName} to produce unique
   * names of the form {@code "<group>-<n>"}. Guarded by the class lock
   * (see the synchronized {@code generateName}).
   */
  private static final Map<String, Integer> ids = new HashMap<String, Integer>();
  private final CrawlerModel model;
  /**
   * Total size (in characters of page content) processed by this thread.
   * Written only by {@link #run()}; volatile so external readers via
   * {@link #getBytesDownloaded()} see up-to-date values. The single-writer
   * pattern makes the non-atomic {@code +=} safe here.
   */
  private volatile long bytesDownloaded = 0;
  /**
   * Cooperative stop flag: set by {@link #pause()} from another thread,
   * polled by {@link #run()}. Volatile is required for cross-thread
   * visibility — without it the run loop may never observe the request.
   */
  private volatile boolean paused = false;
  /** Set once by {@link #run()} when the loop exits; polled via {@link #finished()}. */
  private volatile boolean finished;

  /**
   * Creates a crawler worker in the given thread group, with an auto-generated
   * unique name, pulling work items from the given model.
   *
   * @param group the thread group this worker belongs to (also used for naming)
   * @param model the shared crawl queue/result sink this worker operates on
   */
  public CrawlerThread(ThreadGroup group, CrawlerModel model) {
    super(group, generateName(group));
    this.model = model;
  }

  /**
   * Produces a unique thread name of the form {@code "<group>-<n>"}.
   * Synchronized (on the class) because several CrawlerThreads may be
   * constructed concurrently while sharing the static {@code ids} map;
   * an unsynchronized HashMap read-modify-write can lose updates or
   * corrupt the map.
   */
  private static synchronized String generateName(ThreadGroup group) {
    final String name = group.getName();
    Integer num = ids.get(name);
    num = (num == null) ? 1 : num + 1;
    ids.put(name, num);
    return name + "-" + num;
  }

  /** Requests that this worker stop after the current page; takes effect at the next loop check. */
  public void pause() {
    paused = true;
  }

  /** @return {@code true} once {@link #run()} has exited its work loop */
  public boolean finished() {
    return finished;
  }

  /** @return total content size processed so far by this worker */
  public long getBytesDownloaded() {
    return bytesDownloaded;
  }

  /**
   * Main work loop: repeatedly takes the next URL from the model, downloads it,
   * and reports either the links found on the page or an empty result for
   * pages that could not be fetched. Exits when the model is exhausted or
   * {@link #pause()} has been requested, then sets the finished flag.
   */
  @Override
  public void run() {
    // NOTE(review): hasNext()/next() is a check-then-act pair; if several
    // threads share one model, the model itself must make this safe — verify.
    while (model.hasNext() && !paused) {
      CURL curl = model.next();
      Page page = Downloader.download(curl.url());
      if (page == null || page.getStatusCode() != 200) {
        // Unreachable page: record it and post an empty result so the model's
        // result accounting still sees one result per dequeued URL.
        model.markUnreachable(curl);
        model.add(new CrawlerThreadResult(new ArrayList<CURL>(0)));
      } else {
        try {
          bytesDownloaded += page.getContent().length();
        } catch (Exception e) {
          // Content retrieval failed after a 200 status; treat like an
          // unreachable page. (printStackTrace kept — no logging framework
          // is in scope in this file.)
          e.printStackTrace();
          model.markUnreachable(curl);
          // Post an empty result for consistency with the unreachable branch
          // above, so every dequeued URL produces exactly one result.
          model.add(new CrawlerThreadResult(new ArrayList<CURL>(0)));
          continue;
        }
        final List<CURL> curls = UrlParser.retrieveLinks(curl, page.getContent());
        model.add(new CrawlerThreadResult(curls));
      }
    }
    finished = true;
  }
}
