package main

import org.apache.hadoop.io.IntWritable
import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Mapper
import java.util.StringTokenizer

class CourseScoreAverageMapper : Mapper<LongWritable, Text, Text, IntWritable>() {

  companion object {
    // Fixed counter group: Hadoop limits the number of distinct counters per job
    // (120 by default), so counter names must never be derived from record content.
    private const val COUNTER_GROUP = "CourseScoreAverageMapper"
  }

  // Reusable output writables — standard Hadoop practice to avoid one allocation per record.
  private val outName = Text()
  private val outScore = IntWritable()

  /**
   * Emits one `<studentName, score>` pair for each whitespace-separated
   * "name score" record found in the input line.
   *
   * Malformed records (missing score token, or a score that is not an integer)
   * are counted and skipped instead of failing the task with an exception.
   *
   * @param key byte offset of the line in the input split (unused).
   * @param value one line of input text, UTF-8 encoded.
   * @param context Hadoop task context used to emit pairs and update counters.
   */
  override fun map(key: LongWritable, value: Text, context: Context) {
    // Text.toString() decodes the UTF-8 backing bytes correctly; constructing a
    // String from the raw byte array would use the platform default charset.
    val line = value.toString()
    context.getCounter(COUNTER_GROUP, "InputLines").increment(1)

    // Defensive split on '\n' in case a custom InputFormat hands us multiple lines.
    val lines = StringTokenizer(line, "\n")
    while (lines.hasMoreTokens()) {
      val fields = StringTokenizer(lines.nextToken())
      if (fields.countTokens() < 2) {
        context.getCounter(COUNTER_GROUP, "MalformedRecords").increment(1)
        continue
      }
      val name = fields.nextToken()
      val score = fields.nextToken().toIntOrNull()
      if (score == null) {
        context.getCounter(COUNTER_GROUP, "MalformedRecords").increment(1)
        continue
      }
      outName.set(name)
      outScore.set(score)
      context.write(outName, outScore)
      context.getCounter(COUNTER_GROUP, "OutputRecords").increment(1)
    }
  }
}