package 美国疫情;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.log4j.BasicConfigurator;


/**
 * MapReduce driver: reads a US COVID-19 daily counties text file and writes
 * aggregated rows into the MySQL table {@code cases} via {@link DBOutputFormat}.
 *
 * <p>Pipeline wiring (all classes are project-local):
 * {@code MapTest} emits {@code JavaBean}/{@code NullWritable} pairs, grouped by
 * {@code Group}, and {@code RedTest} reduces them to {@code MysqlBean} rows.
 */
public class DriTest {
    /**
     * Configures and submits the job, exiting with 0 on success, 1 on failure.
     *
     * @param args unused command-line arguments
     * @throws Exception if job configuration or submission fails
     */
    public static void main(String[] args) throws Exception {
        // Minimal log4j setup so Hadoop client logging is visible on the console.
        BasicConfigurator.configure();
        Configuration conf = new Configuration();
       // conf.set("fs.defaultFS","hdfs://192.168.0.155:9000");
        // SECURITY NOTE(review): credentials are hard-coded; consider moving
        // them to a protected configuration or credential provider.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver", "jdbc:mysql://192.168.0.155:3306/data?useUnicode=true&characterEncoding=UTF-8", "root", "123456");
        Job job = Job.getInstance(conf);
        job.setJarByClass(DriTest.class);

        job.setMapperClass(MapTest.class);
        job.setMapOutputKeyClass(JavaBean.class);
        job.setMapOutputValueClass(NullWritable.class);

        job.setReducerClass(RedTest.class);
        job.setOutputKeyClass(MysqlBean.class);
        job.setOutputValueClass(NullWritable.class);

        // Ship the JDBC driver jar to task classpaths. A plain jar goes on via
        // addFileToClassPath; the original also re-added it through the
        // deprecated DistributedCache API, which duplicated the entry.
        // NOTE(review): host "d" in this HDFS URI looks like a typo — confirm
        // against the cluster NameNode address (e.g. 192.168.0.155).
        job.addFileToClassPath(new Path("hdfs://d:9000/MR/jar/mysql-connector-java-5.1.40-bin.jar"));

        job.setGroupingComparatorClass(Group.class);
        // Column order must match MysqlBean's write() field order.
        String[] fields = {"date", "state", "fips", "cases", "deaths"};
        FileInputFormat.setInputPaths(job, "D:\\MP\\美国疫情\\input\\us_counties_covid19_daily.txt");
        // setOutput also installs DBOutputFormat as the job's output format.
        DBOutputFormat.setOutput(job, "cases", fields);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
