package org.solrcn.hadoop.index;

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrServer;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.joda.time.DateTime;

public class SolrMR extends Configured implements Tool {

	/**
	 * Map-only task that parses tab-separated input lines into
	 * {@link SolrInputDocument}s and pushes them directly to the SolrCloud
	 * shard leaders in batches. Nothing is emitted to the MapReduce framework
	 * (both output types are {@link NullWritable}).
	 */
	public static class MapClass extends Mapper<LongWritable, Text, NullWritable, NullWritable> {

		/** Buffered documents awaiting the next batch send. */
		ArrayList<SolrInputDocument> docs = new ArrayList<SolrInputDocument>();
		/** One HTTP client per shard leader, used round-robin for batch updates. */
		List<HttpSolrServer> servers = new ArrayList<HttpSolrServer>();
		String fileName;
		String zkHost;
		String collection;
		int collectionNO;
		int shard;
		int shardNums;
		/** Number of batches sent so far; also the round-robin cursor into {@link #servers}. */
		int commitTimes;
		/** Base hour derived from collectionNO; documents get a random offset within it. */
		DateTime dt;
		final Random r = new Random();

		CloudSolrServer server;

		/**
		 * Connects to ZooKeeper, discovers the active shard leaders of the target
		 * collection, and precomputes the base timestamp for this collection.
		 */
		@Override
		public void setup(Context context) throws MalformedURLException {
			zkHost = context.getConfiguration().get("zkHost", "sc1:2181");
			collectionNO = context.getConfiguration().getInt("collectionNO", 1);
			collection = "collection" + collectionNO;
			server = new CloudSolrServer(zkHost);
			server.setDefaultCollection(collection);
			server.connect();

			// Build one direct HTTP client per shard leader so updates skip the
			// CloudSolrServer routing layer.
			ClusterState clusterState = server.getZkStateReader().getClusterState();
			Collection<Slice> activeSlices = clusterState.getActiveSlices(collection);
			shardNums = activeSlices.size();
			for (Slice slice : activeSlices) {
				ZkCoreNodeProps zkCoreNodeProps = new ZkCoreNodeProps(slice.getLeader());
				String baseUrl = zkCoreNodeProps.getBaseUrl() + "/" + zkCoreNodeProps.getCoreName();
				servers.add(new HttpSolrServer(baseUrl));
			}

			// Decode collectionNO as an hour index into a synthetic calendar of
			// 30-day months (collection1 -> 2010-01-01 01:00). NOTE(review):
			// 30-day months drift from real dates — presumably intentional for
			// test-data generation; confirm before reuse.
			int year = (collectionNO / 24 / 30 / 12) % 10 + 2010;
			int month = ((collectionNO / 24) / 30) % 12 + 1;
			int day = (collectionNO / 24) % 30 + 1;
			int hour = collectionNO % 24;

			dt = new DateTime(year, month, day, hour, 0, 0);

			InputSplit inputSplit = context.getInputSplit();
			fileName = ((FileSplit) inputSplit).getPath().toString();

		}

		/** Parses {@code s} as a long, falling back to 0 on malformed input. */
		private static long parseLongOrZero(String s) {
			try {
				return Long.parseLong(s);
			} catch (NumberFormatException e) {
				return 0L;
			}
		}

		/**
		 * Sends the buffered documents to the next shard leader in round-robin
		 * order, then clears the buffer and advances the cursor. Callers decide
		 * how to react to failures.
		 */
		private void flushBatch() throws SolrServerException, IOException {
			UpdateRequest updateRequest = new UpdateRequest("/update/fast");
			updateRequest.add(docs);
			servers.get(commitTimes % servers.size()).request(updateRequest);
			commitTimes++;
			docs.clear();
		}

		/**
		 * Builds one synthetic document per input line and buffers it; flushes
		 * the buffer to Solr once it exceeds 1000 documents.
		 */
		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

			long rows = key.get();

			// Random offset (in ms) within the hour so timestamps spread evenly.
			long rndTime = r.nextInt(3600) * 1000;

			SolrInputDocument doc = new SolrInputDocument();

			doc.setField("id", java.util.UUID.randomUUID().toString());
			String[] str = value.toString().split("\t");
			doc.setField("rows_s", rows);
			doc.setField("data_" + "sjs" + "_i", (r.nextInt(5000)));
			if (str.length > 31) {
				doc.setField("data_" + 1 + "_s", str[0]);
				// Numeric columns default to 0 when unparseable (was triplicated
				// try/catch; the fallback is now a long, matching the _l fields).
				doc.setField("data_" + 4 + "_l", parseLongOrZero(str[3]));
				doc.setField("data_" + 5 + "_l", parseLongOrZero(str[4]));
				doc.setField("data_" + 7 + "_l", parseLongOrZero(str[6]));
				doc.setField("data_" + 32 + "_cjk", str[31]);
				doc.setField("bloomcjk", str[31]);
			}

			// One timestamp, stored in several field flavours (date, plain
			// long/int, docValues long/int) — previously recomputed five times.
			DateTime eventTime = dt.plus(rndTime);
			doc.setField("time1_dt", eventTime.toDate());
			doc.setField("time2_l", eventTime.getMillis());
			doc.setField("time3_i", eventTime.getMillis() / 1000);
			doc.setField("docValuesLong", eventTime.getMillis());
			doc.setField("docValuesInt", eventTime.getMillis() / 1000);

			docs.add(doc);

			if (docs.size() > 1000) {
				try {
					flushBatch();
				} catch (SolrServerException e) {
					// Best-effort: keep the buffer so these documents are retried
					// together with the next batch. (The old finally block that
					// re-created a discarded local document has been removed.)
					e.printStackTrace();
				}
			}
		}

		/**
		 * Flushes any remaining documents and releases the HTTP and ZooKeeper
		 * clients created in {@link #setup} (previously leaked).
		 */
		@Override
		public void cleanup(Context context) {
			try {
				if (!docs.isEmpty()) {
					flushBatch();
				}
			} catch (SolrServerException e) {
				e.printStackTrace();
			} catch (IOException e) {
				e.printStackTrace();
			} finally {
				docs.clear();
				for (HttpSolrServer s : servers) {
					s.shutdown();
				}
				if (server != null) {
					server.shutdown();
				}
			}
		}
	}

	public static class Reduce extends Reducer<Text, Text, Text, IntWritable> {
		public int max = -1;

		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
			max = -1;
			for (Text val : values) {
				String[] str = val.toString().split("\t", -2);
				if (Integer.parseInt(str[3]) > max)
					max = Integer.parseInt(str[3]);
			}
			context.write(new Text(key), new IntWritable(max));
		}
	}

	/**
	 * Routes records into up to three partitions by the age in tab-separated
	 * column 1 of the value: age &lt;= 20 -&gt; 0, 20 &lt; age &lt;= 50 -&gt; 1,
	 * age &gt; 50 -&gt; 2, each taken modulo numReduceTasks so small reducer
	 * counts remain valid. (Currently unused: the job is configured map-only.)
	 */
	public static class AgePartitioner extends Partitioner<Text, Text> {
		@Override
		public int getPartition(Text key, Text value, int numReduceTasks) {
			// Guard first: with no reducers there is nothing to partition, and
			// parsing the value would be wasted work (or a spurious exception).
			if (numReduceTasks == 0) {
				return 0;
			}
			int age = Integer.parseInt(value.toString().split("\t")[1]);
			if (age <= 20) {
				return 0;
			} else if (age <= 50) { // age > 20 is implied by the branch above
				return 1 % numReduceTasks;
			} else {
				return 2 % numReduceTasks;
			}
		}
	}

	/**
	 * Stores the configuration via {@link Configured}. The previous empty
	 * override silently discarded the configuration supplied by
	 * {@link ToolRunner}, so generic options (-D, -fs, ...) never reached
	 * {@link #run}.
	 */
	@Override
	public void setConf(Configuration conf) {
		super.setConf(conf);
	}

	/**
	 * Returns the stored configuration via {@link Configured}, falling back to
	 * a fresh {@link Configuration} only when none has been set. The previous
	 * override returned a brand-new Configuration on every call, discarding
	 * everything ToolRunner and GenericOptionsParser had configured.
	 */
	@Override
	public Configuration getConf() {
		Configuration conf = super.getConf();
		return conf != null ? conf : new Configuration();
	}

	/**
	 * Configures and runs the map-only Solr indexing job.
	 *
	 * <p>Usage: {@code SolrMR <in> <out> [key=value ...]} — trailing
	 * {@code key=value} pairs (e.g. {@code zkHost=host:2181},
	 * {@code collectionNO=3}) are copied into the job configuration for
	 * {@code MapClass.setup()} to read.
	 *
	 * @return 0 on success, 1 on job failure, 2 on bad usage
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = getConf();
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (otherArgs.length < 2) {
			System.err.println("Usage: SolrMR <in> <out> [zkHost=localhost:2181] [collectionNO=1]");
			// Return per the Tool contract instead of System.exit, so embedding
			// callers and tests can observe the status code.
			return 2;
		}

		// Extra args are key=value overrides; limit 2 keeps any '=' that
		// appears inside the value intact.
		for (int i = 2; i < otherArgs.length; i++) {
			String[] kv = otherArgs[i].split("=", 2);
			conf.set(kv[0], kv[1]);
		}

		Job job = new Job(conf, "Solr Index");
		job.setJarByClass(SolrMR.class);
		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileInputFormat.setMinInputSplitSize(job, 512 * 1024 * 1024L);
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
		job.setMapperClass(MapClass.class);
		// Map-only: documents are pushed to Solr from inside the mapper, so no
		// reducers run and nothing is written to HDFS (NullOutputFormat).
		job.setNumReduceTasks(0);
		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(NullOutputFormat.class);

		// Return the status instead of calling System.exit here (which made the
		// old trailing "return 0" unreachable); main() performs the exit.
		return job.waitForCompletion(true) ? 0 : 1;
	}

	/**
	 * Entry point: delegates to {@link ToolRunner} so generic Hadoop options
	 * are parsed before {@link #run} is invoked, then exits with its status.
	 */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Configuration(), new SolrMR(), args));
	}

}
