package ranks.tasks.get_ready;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.Set;

import mapred.example.base.TextMapper;
import mapred.util.FeatureMap;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import ranks.Customize;
import ranks.base.Const;
import ranks.base.Data;
import ranks.base.Util;

public class GetReady_Mapper extends TextMapper {

    // Seed hosts loaded once in setUp(); consulted per record in map().
    // NOTE(review): was static — made an instance field so state is not shared
    // across mapper instances under Hadoop JVM/task reuse; each task invokes
    // setUp() itself, so behavior for a single task is unchanged.
    private Set<String> seeds = new HashSet<String>();

    // Multiplier parsed from the "SeedsTimes" job config; forwarded verbatim
    // to Customize.initialData(). (Also formerly static — see note above.)
    private double times;

    /**
     * Emits one record per input link-map line: the original key, with the
     * value prefixed by the initial {@code Data} computed from the key's
     * out-degree, the seed host set, and the seeds multiplier.
     *
     * @param key      node identifier (passed through unchanged as output key)
     * @param value    serialized adjacency list, parsed via
     *                 {@code FeatureMap.parse(..., Const.DELIM)}
     * @param output   collector receiving {@code (key, "<data>\t<value>")}
     * @param reporter used to bump the "WebSize" counter once per record
     * @throws IOException if emitting to the collector fails
     */
    @Override
    public void map(Text key, Text value, OutputCollector<Text, Text> output,
            Reporter reporter) throws IOException {
        FeatureMap map = FeatureMap.parse(value.toString(), Const.DELIM);

        // Out-degree is the SUM of edge weights over all targets, not the
        // number of distinct targets.
        long outDegree = 0;
        for (String target : map.featureSet()) {
            outDegree += map.get(target);
        }

        Data data = Customize.initialData(key.toString(), outDegree, seeds,
                times);

        // Output value format: "<data>\t<original adjacency value>"
        // (relies on Data.toString()).
        output.collect(key, new Text(data + "\t" + value));

        reporter.incrCounter("WebSize", "WebSize", 1);
    }

    /**
     * Reads the seed host list from the HDFS path named by the "Seeds" job
     * config and parses the "SeedsTimes" multiplier. On I/O failure the task
     * exits immediately (exit code 255) so the job fails fast instead of
     * running with an empty seed set.
     */
    @Override
    public void setUp() {
        times = Double.parseDouble(this.getJobConfConfig("SeedsTimes"));

        BufferedReader br = null;
        try {
            FileSystem fs = FileSystem.get(this.getJobConf());
            // NOTE(review): reader uses the platform default charset, as the
            // original did — confirm the seeds file encoding if it can contain
            // non-ASCII hosts.
            br = new BufferedReader(new InputStreamReader(
                    fs.open(new Path(this.getJobConfConfig("Seeds")))));
            for (String line = br.readLine(); line != null; line = br
                    .readLine()) {
                seeds.add(Util.getHost(line.trim()));
            }
            // Presumably blank/unparseable lines yield "" from Util.getHost;
            // drop that sentinel so it never matches a real host.
            seeds.remove("");
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(255);
        } finally {
            // Fix: the original only closed the reader on the success path,
            // leaking the stream if an IOException occurred mid-read.
            if (br != null) {
                try {
                    br.close();
                } catch (IOException ignored) {
                    // best-effort close; seeds already loaded or task is exiting
                }
            }
        }
    }

}
