package edu.zju.gis.dbfg.model.tile;

import edu.zju.gis.dbfg.common.exception.ModelFailedException;
import edu.zju.gis.dbfg.common.util.FileUtil;
import edu.zju.gis.dbfg.model.PyramidConfig;
import edu.zju.gis.dbfg.model.util.TileID;
import edu.zju.gis.dbfg.model.util.TileUtil;
import edu.zju.gis.dbfg.model.util.ZLevelInfo;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.geotools.geometry.jts.JTS;
import org.geotools.referencing.CRS;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.io.WKTReader;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.MathTransform;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Basic idea:
 * Partition the input data into a grid of job tiles; each grid cell is
 * processed in one full-computation iteration.
 *
 * Reads a query geometry (WKT), intersects it with the tile pyramid at the
 * job-split zoom level, and builds vector tiles for every intersecting cell
 * from the configured Elasticsearch datasources.
 *
 * @author Hu
 * @Date 2019/10/28
 */
public class MVectorTileClipper {

    private static final Logger logger = LoggerFactory.getLogger(MVectorTileClipper.class);

    /**
     * Entry point. Expected arguments are parsed by {@link VectorTileClipperArgs#initArgs}.
     *
     * @param args command-line model arguments
     * @throws Exception on argument, CRS, I/O, or tile-building failure
     */
    public static void main(String[] args) throws Exception {

        // Set up the Spark execution environment.
        logger.info("Setup Spark Context");
        SparkConf conf = new SparkConf();
        conf.setMaster("local[8]");
        conf.setAppName("lalala");

        // FIX: JavaSparkContext is Closeable and was never stopped; use
        // try-with-resources so the context is released even when a tile
        // build throws.
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {

            // Parse model arguments.
            logger.info("Setup arguments");
            VectorTileClipperArgs mArgs = VectorTileClipperArgs.initArgs(args);

            if (mArgs == null) {
                String msg = "init args failed, exit";
                logger.error(msg);
                // FIX: report this class as the failure source (was VectorTileClipper.class,
                // an apparent copy-paste slip).
                throw new ModelFailedException(MVectorTileClipper.class, "main()", msg, args);
            }

            // The job-split level must not be deeper than the minimum zoom level.
            if (mArgs.getJobSplitZLevel() > mArgs.getZMin()) {
                mArgs.setJobSplitZLevel(mArgs.getZMin());
            }

            File outDir = new File(mArgs.getOutDir());
            if (!outDir.exists()) {
                // FIX: check the mkdirs() result instead of unconditionally
                // logging success; fail fast if the output directory cannot
                // be created.
                if (outDir.mkdirs()) {
                    logger.info("create new directory for output tiles: " + outDir.getAbsolutePath());
                } else {
                    String msg = "failed to create output directory: " + outDir.getAbsolutePath();
                    logger.error(msg);
                    throw new ModelFailedException(MVectorTileClipper.class, "main()", msg, args);
                }
            }

            CoordinateReferenceSystem targetCrs = CRS.parseWKT(mArgs.getTargetCRS());
            CoordinateReferenceSystem sourceCrs = CRS.parseWKT(mArgs.getSourceCRS());

            // Parse the target CRS extent string ("xmin,ymin,xmax,ymax" order
            // is assumed by the builder call below — confirm against the
            // PyramidConfigBuilder contract).
            List<Double> targetCrsExtents = Arrays.stream(mArgs.getExtent().split(","))
                    .map(Double::valueOf)  // idiomatic method reference instead of anonymous Function
                    .collect(Collectors.toList());

            // Define the tile pyramid grid parameters.
            PyramidConfig pyramidConfig = new PyramidConfig.PyramidConfigBuilder()
                    .setBaseMapEnv(targetCrsExtents.get(0), targetCrsExtents.get(1),
                            targetCrsExtents.get(2), targetCrsExtents.get(3))
                    .setCrs(targetCrs)
                    .setzLevelRange(mArgs.getZMin(), mArgs.getZMax())
                    .build();

            // Read the query geometry as WKT from the first line of the file.
            WKTReader wktReader = new WKTReader();
            String qgWkt = FileUtil.readByLine(mArgs.getQgWktFile(), 1, false).get(0);
            Geometry qg = wktReader.read(qgWkt);
            GeometryFactory geometryFactory = new GeometryFactory();

            // Re-project the query geometry when source and target CRS differ.
            if (!mArgs.getTargetCRS().equals(mArgs.getSourceCRS())) {
                MathTransform transform = CRS.findMathTransform(sourceCrs, targetCrs);
                qg = JTS.transform(qg, transform);
            }

            Envelope jobEnvelope = qg.getEnvelopeInternal();
            ZLevelInfo[] jobLevels = TileUtil.initZLevelInfoPZ(pyramidConfig, jobEnvelope);

            // Collect every tile at the job-split level whose box intersects
            // the query geometry.
            List<TileID> jobTiles = new ArrayList<>();
            ZLevelInfo jobLevel = jobLevels[mArgs.getJobSplitZLevel() - mArgs.getZMin()];
            int txMin = jobLevel.getTileRanges()[0];
            int txMax = jobLevel.getTileRanges()[1];
            int tyMin = jobLevel.getTileRanges()[2];
            int tyMax = jobLevel.getTileRanges()[3];
            for (int x = txMin; x <= txMax; x++) {
                for (int y = tyMin; y <= tyMax; y++) {
                    TileID tile = new TileID();
                    tile.setX(x);
                    tile.setY(y);
                    tile.setzLevel(mArgs.getJobSplitZLevel());
                    Envelope tileBox = TileUtil.createTileBox(tile, pyramidConfig);
                    if (geometryFactory.toGeometry(tileBox).intersects(qg)) {
                        jobTiles.add(tile);
                    }
                }
            }

            logger.info(String.format(" ============ TOTAL TILE NUM: %d =========== ", jobTiles.size()));

            // NOTE: removed an unused tileIndex list that was built here but
            // never read anywhere.

            // Validate and normalize datasource URLs; only the
            // "es://{indexname}" schema is supported.
            logger.info("Setup datasource");
            String[] datasources = mArgs.getInput().split(",");
            for (int i = 0; i < datasources.length; i++) {
                String datasource = datasources[i];
                if (!datasource.startsWith("es://")) {
                    // FIX: "Unvalid" -> "Invalid" in the error message.
                    String msg = String.format("Invalid datasource %s, only support elasticsearch datastore yet (url schema as 'es://{indexname}')", datasource);
                    logger.error(msg);
                    throw new ModelFailedException(MVectorTileClipper.class, "main()", msg, args);
                }
                datasources[i] = datasource.replace("es://", "");
            }

            // Build tiles for each intersecting job tile, one full iteration
            // per grid cell.
            MTileJob mTileJob = new MTileJob(pyramidConfig, mArgs, sourceCrs, targetCrs, datasources);
            for (TileID jobTile : jobTiles) {
                mTileJob.buildTile(jsc, jobTile);
            }
        }
    }
}
