package com.taobao.job;

import com.taobao.vsearch.VsearchDocumentBuilder;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.*;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;

/**
 * MapReduce job that parses tab/colon-delimited item records and builds a
 * Lucene index: each reducer writes a local per-partition index and copies it
 * to a timestamped destination path on close.
 *
 * User: caoling
 * Date: 11-3-14
 */
public class ItemIndexJob {

    // Separator between item records within one input line.
    public static final String outerDivider = "\t";
    // Separator between fields within one item record.
    public static final String colon = ":";


    public static class ItemMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, LongWritable, MapWritable> {

        // itemString2Map reads fields[0]..fields[10]; shorter records are malformed.
        private static final int REQUIRED_FIELDS = 11;

        /**
         * Parses one tab-separated input line into per-item field maps and
         * emits (itemId, fieldMap) pairs. The first tab-separated token of the
         * line is skipped, as in the original format (presumably a line-level
         * key — TODO confirm against the producer of "itemRaw").
         */
        @Override
        public void map(LongWritable key, Text value, OutputCollector<LongWritable, MapWritable> output,
                        Reporter reporter) throws IOException {
            String valueString = value.toString();
            if (StringUtils.isBlank(valueString)) {
                return;
            }
            // String.split never returns null, so no null check is needed.
            String[] itemInfos = valueString.split(outerDivider);
            for (int i = 1; i < itemInfos.length; i++) {
                String item = itemInfos[i];
                MapWritable fieldMap = itemString2Map(item);
                if (fieldMap == null) {
                    // Blank or truncated record: skip it rather than emitting a
                    // null value (as the original did for blank segments) or
                    // failing the task with ArrayIndexOutOfBoundsException.
                    continue;
                }
                // Parse via Double to tolerate decimal notation, consistent
                // with how itemString2Map derives "aid" from the same field.
                LongWritable itemId = new LongWritable(Double.valueOf(item.split(colon)[0]).longValue());
                output.collect(itemId, fieldMap);
            }
        }

        /**
         * Converts one colon-separated item record into a MapWritable document.
         *
         * @param itemString record of the form aid:uid:price:pv:cid:... (11 fields)
         * @return the field map, or null when the record is blank or has fewer
         *         than the 11 fields this method reads
         */
        private static MapWritable itemString2Map(String itemString) {
            if (StringUtils.isBlank(itemString)) {
                return null;
            }
            String[] fields = itemString.split(colon);
            if (fields.length < REQUIRED_FIELDS) {
                return null;
            }
            MapWritable doc = new MapWritable();
            // ids may arrive in decimal notation; normalize through Double.
            Long aid = Double.valueOf(fields[0]).longValue();
            doc.put(new Text("aid"), new Text(aid.toString()));
            Long uid = Double.valueOf(fields[1]).longValue();
            doc.put(new Text("uid"), new Text(uid.toString()));
            doc.put(new Text("price"), new Text(fields[2]));
            String pv = fields[3];
            doc.put(new Text("pv"), new Text(pv));

            if (StringUtils.isNotBlank(pv)) {
                // Keep only the part before '_' of each whitespace-separated
                // token; the set removes duplicate properties.
                Set<String> pSet = new HashSet<String>();
                StringTokenizer st = new StringTokenizer(pv);
                while (st.hasMoreTokens()) {
                    pSet.add(st.nextToken().split("_")[0]);
                }
                doc.put(new Text("property"), new Text(StringUtils.join(pSet, " ")));
            }

            doc.put(new Text("cid"), new Text(fields[4]));
            doc.put(new Text("sold30Times"), new Text(fields[5]));
            doc.put(new Text("sold1Times"), new Text(fields[6]));
            doc.put(new Text("sold30Num"), new Text(fields[7]));
            doc.put(new Text("sold1Num"), new Text(fields[8]));
            doc.put(new Text("collectTime"), new Text(fields[9]));
            doc.put(new Text("duv"), new Text(fields[10]));
            return doc;
        }
    }

    public static class ItemReduce extends MapReduceBase
            implements Reducer<LongWritable, MapWritable, LongWritable, Text> {
        /** Translates a raw field map into a Lucene Document; shared across tasks in this JVM. */
        static VsearchDocumentBuilder builder = new VsearchDocumentBuilder();
        private IndexWriter indexWriter;
        /** Count of documents written by this JVM (static to match the original accounting). */
        private static long num = 0;
        /** Accumulated milliseconds spent inside updateDocument, across all reduce calls. */
        static AtomicLong indexTime = new AtomicLong();
        private JobConf _conf;
        /** Local scratch directory holding the index before it is copied to HDFS. */
        private File file;
        static Logger logger = LoggerFactory.getLogger(ItemReduce.class);
        private Reporter _reporter;

        /**
         * Indexes every value for the given item id into the local Lucene
         * index. Rows that cannot be converted or indexed are emitted to the
         * job output so failures remain visible instead of being dropped.
         */
        @Override
        public void reduce(LongWritable key, Iterator<MapWritable> values, OutputCollector<LongWritable, Text> output,
                           Reporter reporter) throws IOException {
            // Remember the reporter for the WaitThread heartbeat; the original
            // declared _reporter but never assigned it.
            _reporter = reporter;
            while (values.hasNext()) {
                // Signal liveness so the task tracker does not kill long reduces.
                reporter.progress();
                Map<String, Object> row = mapWriteable2Map(values.next());
                try {
                    Document doc = builder.toDocument(row);
                    //TODO boosting
                    if (doc != null) {
                        // A single updateDocument is sufficient: it deletes any
                        // prior document with the same id term, then adds this
                        // one. The original addDocument+updateDocument pair
                        // indexed each row twice only for the first copy to be
                        // deleted again by the update.
                        Term term = builder.getIdTerm(doc);
                        long start = System.currentTimeMillis();
                        indexWriter.updateDocument(term, doc);
                        indexTime.addAndGet(System.currentTimeMillis() - start);
                        num++;
                    }
                } catch (Exception e) {
                    logger.error("failed to index row for key " + key, e);
                    // Emit the failing row so it can be inspected or replayed.
                    output.collect(key, new Text(row.toString()));
                }
            }
        }

        /**
         * Opens a local-disk IndexWriter under hadoop.tmp.dir, one scratch
         * directory per reduce partition.
         *
         * @throws RuntimeException if the writer cannot be created — failing
         *         fast here avoids the NullPointerException the original hit
         *         at setMergeFactor (and later in reduce) after a swallowed
         *         constructor failure.
         */
        @Override
        public void configure(JobConf job) {
            _conf = job;
            // Partition number makes the scratch directory unique per reducer.
            int numF = job.getInt("mapred.task.partition", -2) + 1;
            file = new File(_conf.get("hadoop.tmp.dir"), "recommend-search-" + numF);
            Analyzer analyzer = builder.getAnalyzer();
            try {
                indexWriter = new IndexWriter(FSDirectory.open(new File(file, "index")), analyzer, IndexWriter.MaxFieldLength.UNLIMITED);
                indexWriter.setMaxBufferedDocs(100000);
                indexWriter.setRAMBufferSizeMB(300);
                // Non-compound files trade file-handle count for merge speed.
                indexWriter.setUseCompoundFile(false);
                indexWriter.setMergeFactor(100);
            } catch (IOException e) {
                // CorruptIndexException and LockObtainFailedException are both
                // IOExceptions; without a writer every reduce call would NPE.
                throw new RuntimeException("unable to open index writer at " + file, e);
            }
        }

        /**
         * Heartbeat thread: logs periodically while close() optimizes the
         * index, which can take long enough for the cluster to suspect a hang.
         */
        class WaitThread extends Thread {
            public volatile boolean stop = false;

            @Override
            public void run() {
                while (!stop) {
                    System.out.println("mmmmmmmmmmmmmmmmmmmm" + (_reporter == null));
//                    _reporter.progress();
                    try {
                        sleep(10000);
                    } catch (InterruptedException ignored) {
                        // done() interrupts us purely to exit the sleep early;
                        // the volatile flag terminates the loop.
                    }
                }
            }

            public void done() {
                stop = true;
                logger.info("interrupt stop thread...");
                this.interrupt();
            }
        }

        /**
         * Commits, optimizes and closes the local index, then copies it to the
         * configured "finalDestination" path and deletes the scratch directory.
         */
        @Override
        public void close() throws IOException {
            WaitThread t = new WaitThread();
            logger.info("start wait thread");
            t.start();
            try {
                indexWriter.setUseCompoundFile(false);
                logger.info("segment files before commit: {}", indexWriter.getDirectory().listAll().length);
                indexWriter.commit();
                logger.info("start optimize");
                try {
                    indexWriter.optimize();
                } catch (Exception e) {
                    // A failed optimize leaves the index in an unknown state;
                    // roll back to the last commit rather than publish it.
                    logger.info("rollback by optimize fail", e);
                    indexWriter.rollback();
                }

                logger.info("opt done, start index writer close...");
                indexWriter.close();
                logger.info("index writer close done");

                FileSystem fileSystem = FileSystem.get(_conf);
                logger.info("copy from local file...");
                Path destination = new Path(_conf.get("finalDestination"));
                fileSystem.copyFromLocalFile(new Path(file.getAbsolutePath()), destination);

                FileUtil.fullyDelete(file);
            } finally {
                // Always stop the heartbeat thread — the original leaked it
                // (and hence the JVM) whenever optimize/copy threw.
                t.done();
            }
            logger.info("run end!");
        }

        /**
         * Converts a Hadoop MapWritable into a plain String map, substituting
         * "0" for blank values so downstream numeric parsing never sees empty
         * input.
         */
        private static Map<String, Object> mapWriteable2Map(MapWritable mw) {
            Map<String, Object> map = new HashMap<String, Object>();
            for (Map.Entry<Writable, Writable> entry : mw.entrySet()) {
                String value = entry.getValue().toString();
                map.put(entry.getKey().toString(), StringUtils.isNotBlank(value) ? value : "0");
            }
            return map;
        }

    }


    /**
     * Configures and launches the indexing job: ItemMapper parses "itemRaw"
     * input lines into field maps, ItemReduce builds a Lucene index per
     * partition and copies it to the "finalDestination" path.
     */
    public static void main(String[] args)
            throws Exception {
        JobConf conf = new JobConf(ItemIndexJob.class);
        conf.setJobName("ItemIndexJob");

        // Mapper emits (itemId, fieldMap); reducer emits (itemId, failedRowText).
        conf.setMapperClass(ItemMapper.class);
        conf.setMapOutputKeyClass(LongWritable.class);
        conf.setMapOutputValueClass(MapWritable.class);

        conf.setReducerClass(ItemReduce.class);
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);

        // Timestamped paths keep repeated runs from colliding.
        String luceneIndexpath = "luceneIndex" + System.currentTimeMillis();
        conf.set("finalDestination", luceneIndexpath);
        String outputPath = "index" + System.currentTimeMillis();

        FileInputFormat.setInputPaths(conf, new Path("itemRaw"));
        FileOutputFormat.setOutputPath(conf, new Path(outputPath));

        JobClient.runJob(conf);
    }


}
