package cn.edu.cug.cs.gtl.docsrv.shp.manager;

import cn.edu.cug.cs.gtl.common.Pair;
import cn.edu.cug.cs.gtl.docsrv.config.Configuration;
import cn.edu.cug.cs.gtl.docsrv.doc.DocManager;
import cn.edu.cug.cs.gtl.docsrv.shp.querier.BlockQuerier;
import cn.edu.cug.cs.gtl.docsrv.shp.querier.DefaultBlockQuerier;
import cn.edu.cug.cs.gtl.docsrv.shp.updater.BlockUpdater;
import cn.edu.cug.cs.gtl.docsrv.shp.updater.DefaultBlockUpdater;
import cn.edu.cug.cs.gtl.io.FileDataSplitter;
import cn.edu.cug.cs.gtl.mybatis.Session;
import cn.edu.cug.cs.gtl.mybatis.mapper.doc.DocAll;
import cn.edu.cug.cs.gtl.mybatis.mapper.shp.*;

import cn.edu.cug.cs.gtl.mybatis.metadata.ShpUtils;
import cn.edu.cug.cs.gtl.net.ftp.client.ClientFTP;
import cn.edu.cug.cs.gtl.net.hdfs.client.ClientHDFS;
import cn.edu.cug.cs.gtl.protos.TimeValue;
import cn.edu.cug.cs.gtl.protoswrapper.TimeValueWrapper;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.locationtech.jts.geom.Envelope;


import java.io.*;
import java.math.BigDecimal;
import java.net.URI;
import java.util.*;

/**
 * Shapefile document manager backed by Oracle + HDFS.
 * The SHP_ITEM table stores the metadata entry for each shapefile document;
 * the raw archive content lives on HDFS. The SHP_URL column of SHP_ITEM holds
 * the full remote path of the archive on the server.
 */
public class HDFSShpManager extends ShpManager {
    ClientHDFS clientHDFS;
    /**
     * 将ShpUrl解析成HDFS服务器上的remoteDir和remoteFileName
     * @param url  格式为  hdfs://202.114.194.1:9000/docsrv/shp/shpExample.zip
     * @return 解析后的结果为
     * remoteDir = /docsrv/shp
     * remoteFileName=shpExample.zip
     */
    private static  Pair<String,String> parseShpUrl(String url){
        String[] ss = url.split("/");
        String remoteFileName = ss[ss.length - 1];
        StringBuilder stringBuilder = new StringBuilder();
        for (int i = 3; i < ss.length - 1; i++) {
            stringBuilder.append("/");
            stringBuilder.append(ss[i]);
        }
        String remoteDir = stringBuilder.toString();
        Pair<String,String> p = new Pair<>(remoteDir,remoteFileName);
        return p;
    }

    /**
     * @param session
     * @param hdfs
     */
    public HDFSShpManager(Session session, ClientHDFS hdfs) {
        super(session);
        clientHDFS = hdfs;
    }
    /**
     * 将一个由shpItem+shpFile组成的图形文档插入数据库
     *
     * @param shpItem 图形元数据信息,
     *                其中的shpUrl一定要设置值，形式如下
     *                hdfs://202.114.194.1:9000/docsrv/shp/shpExample.zip
     * @param shpFile 图形文件内容
     * @return
     */
    @Override
    public boolean insert(ShpItem shpItem, ShpFile shpFile) {
        try {
            //上传HDFS
            Pair<String, String> p = parseShpUrl(shpItem.getShpUrl());
            clientHDFS.uploadBytes(shpFile.getShpRawBytes(), p.first(), p.second());
            //写入元数据
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            shpItemMapper.insert(shpItem);
            session.commit();
            return true;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return false;
    }

    /**
     * 将ShpAll分解成ShpItem和ShpFile，
     * 然后将ShpItem插入数据表SHP_ITEM，
     * 将ShpFile插入数据表SHP_FILE，
     * @param shpAll
     * @return
     */
    @Override
    public boolean insert(ShpAll shpAll) {
        Pair<ShpItem,ShpFile> p = ShpUtils.fromShpAll(shpAll);
        return insert(p.getFirst(),p.getSecond());
    }

    /**
     * 读取本地目录中的所有图形文档，将其插入图形库中
     *
     * @param localDir 本地目录
     * @return 成功返回true, 否则返回false
     */
    @Override
    public boolean insert(String localDir) {
        try {
            File file = new File(localDir);
            if (!file.isDirectory())
                return false;
            File[] files = file.listFiles();
            for (File f : files) {
                if(f.isDirectory()){
                    return insert(f.getCanonicalPath());
                }

                Pair<ShpItem,ShpFile> p = parseShpFiles(f);

                if(p==null)  continue;

                //设置SHPItem
                ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
                p.first().setShpInternalId(shpItemMapper.nextInternalId());
                StringBuilder shpUrl=new StringBuilder();
                shpUrl.append("ftp://");
                shpUrl.append(cn.edu.cug.cs.gtl.docsrv.config.Configuration.getFtpConfig().getFtpIP());
                shpUrl.append(":");
                shpUrl.append(String.valueOf(cn.edu.cug.cs.gtl.docsrv.config.Configuration.getFtpConfig().getFtpPort()));
                shpUrl.append(Configuration.getFtpConfig().getFtpWorkingDirectory());
                shpUrl.append("/");
                shpUrl.append(f.getName().replace("shp","zip").trim());
                p.first().setShpUrl(shpUrl.toString());


                shpItemMapper.insert(p.first());
                session.commit();
                //上传HDFS
                Pair<String, String> ps= parseShpUrl(shpUrl.toString());
                clientHDFS.uploadBytes(p.second().getShpRawBytes(), ps.first(), ps.second());
            }
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return true;
    }

    /**
     * 根据shpInternalId删除数据库中对应的图形文档记录
     *
     * @param shpItem 必须填充shpInternalId和shpUrl
     * @return
     */
    @Override
    public boolean delete(ShpItem shpItem) {
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            shpItemMapper.deleteByPrimaryKey(shpItem.getShpInternalId());
            Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
            return clientHDFS.deleteFile(p.first(),p.second());
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return false;
    }

    /**
     * 根据Id删除图形库中的元数据和内容数据
     * @param shpAll 必须填充shpInternalId和shpUrl
     * @return
     */
    @Override
    public boolean delete(ShpAll shpAll) {
        Pair<ShpItem,ShpFile> p = ShpUtils.fromShpAll(shpAll);
        return delete(p.first());
    }

    /**
     * 根据shpItem+shpFile更新数据库中的图形文档记录
     *
     * @param shpItem 该对象的shpInternalId必须填充,
     *                shpUrl必须填充
     * @param shpFile 更新文档文件的内容
     * @return 更新成功返回true, 否则返回false
     */
    @Override
    public boolean update(ShpItem shpItem, ShpFile shpFile) {
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            shpItemMapper.updateByPrimaryKey(shpItem);
            Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
            boolean b = clientHDFS.exists(new Path(p.first()+"/"+p.second()));
            if(b) clientHDFS.deleteFile(p.first(),p.second());
            return clientHDFS.uploadBytes(shpFile.getShpRawBytes(),p.first(),p.second());
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return false;
    }

    /**
     *根据ShpAll中的内容更新图形库中对应的信息
     * @param shpAll 该对象的shpInternalId必须填充
     * @return
     */
    @Override
    public boolean update(ShpAll shpAll) {
        Pair<ShpItem,ShpFile> p = ShpUtils.fromShpAll(shpAll);
        return update(p.first(),p.second());
    }

    /**
     * 根据ID查询图形块信息
     * @param shpInternalId
     * @return
     */
    @Override
    public ShpAll queryByInternalId(BigDecimal shpInternalId) {
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            ShpItem shpItem = shpItemMapper.selectByPrimaryKey(shpInternalId);
            Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
            byte [] bytes = clientHDFS.downloadBytes(p.first(),p.second());
            ShpFile shpFile = new ShpFile();
            shpFile.setShpInternalId(shpInternalId);
            shpFile.setShpRawBytes(bytes);
            ShpAll shpAll = ShpUtils.toShpAll(shpItem,shpFile);
            return shpAll;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

    /**
     * 根据英文标题查询图形内容
     * @param shpEnTitle
     * @return
     */
    @Override
    public List<ShpAll> queryByEnTitle(String shpEnTitle){
        List<ShpAll> res = new ArrayList<>();
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            List<ShpItem> shpItems = shpItemMapper.selectByEnTitle(shpEnTitle);
            for(ShpItem shpItem: shpItems){
                Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
                byte [] bytes = clientHDFS.downloadBytes(p.first(),p.second());
                ShpFile shpFile = new ShpFile();
                shpFile.setShpInternalId(shpItem.getShpInternalId());
                shpFile.setShpRawBytes(bytes);
                ShpAll shpAll = ShpUtils.toShpAll(shpItem,shpFile);
                res.add(shpAll);
            }
            return res;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

    /**
     * 根据中文标题查询图形内容
     * @param shpCnTitle  文档注释
     * @return
     */
    @Override
    public List<ShpAll> queryByComment(String shpCnTitle) {
        List<ShpAll> res = new ArrayList<>();
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            List<ShpItem> shpItems = shpItemMapper.selectByCnTitle(shpCnTitle);
            for(ShpItem shpItem: shpItems){
                Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
                byte [] bytes = clientHDFS.downloadBytes(p.first(),p.second());
                ShpFile shpFile = new ShpFile();
                shpFile.setShpInternalId(shpItem.getShpInternalId());
                shpFile.setShpRawBytes(bytes);
                ShpAll shpAll = ShpUtils.toShpAll(shpItem,shpFile);
                res.add(shpAll);
            }
            return res;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

    /**
     * 根据类型查询图形信息
     * @param shpType
     * @return
     */
    @Override
    public List<ShpAll> queryByType(String shpType) {
        List<ShpAll> res = new ArrayList<>();
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            List<ShpItem> shpItems = shpItemMapper.selectByType(shpType);
            for(ShpItem shpItem: shpItems){
                Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
                byte [] bytes = clientHDFS.downloadBytes(p.first(),p.second());
                ShpFile shpFile = new ShpFile();
                shpFile.setShpInternalId(shpItem.getShpInternalId());
                shpFile.setShpRawBytes(bytes);
                ShpAll shpAll = ShpUtils.toShpAll(shpItem,shpFile);
                res.add(shpAll);
            }
            return res;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

    /**
     * 根据添加入库时间查询图形
     * @param begin 开始时间
     * @param end  结束时间
     * @return
     */
    @Override
    public List<ShpAll> queryByAppendDate(TimeValue begin, TimeValue end) {
        List<ShpAll> res = new ArrayList<>();
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            List<ShpItem> shpItems =  shpItemMapper.selectByAppendDate(TimeValueWrapper.toUtilDate(begin),TimeValueWrapper.toUtilDate(end));
            for(ShpItem shpItem: shpItems){
                Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
                byte [] bytes = clientHDFS.downloadBytes(p.first(),p.second());
                ShpFile shpFile = new ShpFile();
                shpFile.setShpInternalId(shpItem.getShpInternalId());
                shpFile.setShpRawBytes(bytes);
                ShpAll shpAll = ShpUtils.toShpAll(shpItem,shpFile);
                res.add(shpAll);
            }
            return res;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

    /**
     * 根据传入参数在图形库中进行查询
     *       try {
     *             Map<String,Object> p = new LinkedHashMap<String,Object>();
     *             p.put("shpInternalId",BigDecimal.valueOf(1));
     *             List<ShpItem> r1 = this.query(p);
     *             p.clear();
     *             p.put("shpEnTile","aaa");
     *             p.put("shpCnTile","zhangsan");
     *             List<ShpItem> r2 = this.query(p);
     *             boolean b = r1.size()==r2.size();
     *         }
     *         catch (Exception e){
     *             e.printStackTrace();
     *         }
     * @param parameters
     *     BigDecimal shpInternalId;
     *
     *     String shpEnTitle;
     *
     *     String shpCnTitle;
     *
     *     String shpUuid;
     *
     *     String shpVersion;
     *
     *     String shpKeywords;
     *
     *     String shpAbstract;
     *
     *     String shpContent;
     *
     *     String shpType;
     *
     *     String shpAuthors;
     *
     *     String shpAffiliation;
     *
     *     String shpTags;
     *
     *     String shpUrl;
     *
     *     Date shpAppendDate;
     *
     *     Date shpUpdateDate;
     *
     *     Date shpCreateDate;
     *
     *     String shpMemo;
     *
     *     BigDecimal shpCatalog;
     *
     *     String shpExtent;
     *
     *     byte[] shpRawBytes;
     * @return 返回查询到的文档。
     */
    @Override
    public List<ShpAll> query(Map<String, Object> parameters) {

        List<ShpAll> res = new ArrayList<>();
        try{
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            List<ShpItem> shpItems =  shpItemMapper.query(parameters);
            for(ShpItem shpItem: shpItems){
                Pair<String,String> p = parseShpUrl(shpItem.getShpUrl());
                byte [] bytes = clientHDFS.downloadBytes(p.first(),p.second());
                ShpFile shpFile = new ShpFile();
                shpFile.setShpInternalId(shpItem.getShpInternalId());
                shpFile.setShpRawBytes(bytes);
                ShpAll shpAll = ShpUtils.toShpAll(shpItem,shpFile);
                res.add(shpAll);
            }
            return res;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

    /**
     * 获取图形块ShapeBlock级别的查询器
     *
     * @return
     */
    @Override
    public BlockQuerier getBlockQuerier() {

        return new DefaultBlockQuerier(this);
    }

    /**
     * 获取图形块ShapeBlock级别的更新器
     * @return
     */
    @Override
    public BlockUpdater getBlockUpdater() {
        return new DefaultBlockUpdater(this);
    }

    /**
     * 查询得到所有ShapeBlock的边界矩形
     * @return
     */
    @Override
    public  List<Pair<BigDecimal, Envelope>> queryAllEnvelopes(){
        try {
            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
            List<LinkedHashMap<String, Object>> ls = shpItemMapper.selectAllInternalIdAndExtents();
            System.out.println(ls.size());
            List<Pair<BigDecimal, Envelope>> r = new ArrayList<>();
            for (LinkedHashMap<String, Object> l: ls){
                BigDecimal id = (BigDecimal)l.get("SHP_INTERNAL_ID");
                String coords = (String)l.get("SHP_EXTENT");
                String [] cs = coords.split(FileDataSplitter.COMMA.getDelimiter());
                double minX = Double.valueOf(cs[0]).doubleValue();
                double maxX = Double.valueOf(cs[1]).doubleValue();
                double minY = Double.valueOf(cs[2]).doubleValue();
                double maxY = Double.valueOf(cs[3]).doubleValue();
                Envelope envelope = new Envelope(minX,maxX,minY,maxY);
                r.add(new Pair<>(id,envelope));
            }
            return r;
        }
        catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }

//
//    Configuration conf; //hadoop 配置，使用DataNode hostname
//    String url; // hdfs url hdfs://202.114.194.1:9000
//
//    public HDFSShpManager(Session s) {
//        super(s);
//        this.conf = new Configuration();
//        this.url = "hdfs://202.114.194.1:9000";
//        conf.set("dfs.client.use.datanode.hostname","true");
//    }
//
//    /**
//     * 将一个由shpItem+shpFile组成的图形文档插入数据库
//     *
//     * @param shpItem 图形元数据信息
//     * @param shpFile 图形文件内容
//     * @return
//     */
//    @Override
//    public boolean insert(ShpItem shpItem, ShpFile shpFile) {
//        return false;
//    }
////
////    @Override
////    public List<ShpAll> extractShpDocument(DocManager docManager, TimeValue begin, TimeValue end) {
////        List<ShpAll> res = new ArrayList<>();
////        List<DocAll> docAllList = docManager.queryByAppendDate(begin, end);
////        for (DocAll docAll: docAllList){
////            ShpAll shpAll = new ShpAll();
////            ShpUtils.toShpAll(docAll);
////            extractShpAllByHDFS(shpAll);
////            res.add(shpAll);
////        }
////        return res;
////    }
////
////    @Override
////    public ShpAll extractShpDocument(DocManager docManager, String title, String version) {
////        List<DocAll> docAllList = docManager.queryByTitle(title);
////        for (DocAll docAll: docAllList){
////            if (docAll.getDocVersion().equals(version)){
////                ShpAll shpAll = new ShpAll();
////                ShpUtils.toShpAll(docAll);
////                extractShpAllByHDFS(shpAll);
////                return shpAll;
////            }
////        }
////        return null;
////    }
//
//    @Override
//    public ShpAll queryByInternalId(BigDecimal shpInternalId) {
//        return null;
//    }
//
//    @Override
//    public List<ShpAll> queryByEnTitle(String shpEnTitle) {
//        return null;
//    }
//
//    @Override
//    public List<ShpAll> queryByComment(String shpCnTitle) {
//        return null;
//    }
//
//    public ShpAll extractShpAllByHDFS(ShpAll shpAll){
//        StringBuilder stringBuilder = new StringBuilder();
//        String[] ss = shpAll.getShpUrl().split("/");
//        String remoteFileName = ss[ss.length-1];
//        for (int i = 3; i < ss.length-1; i++) {
//            stringBuilder.append("/");
//            stringBuilder.append(ss[i]);
//        }
//        String hdfsPath = stringBuilder.toString();
//        byte[] bytes = null;
//        try{
//            FileSystem fileSystem = FileSystem.get(URI.create(url), conf);
//            FSDataInputStream fsDataInputStream = fileSystem.open(new Path(hdfsPath));
//            bytes = new byte[fsDataInputStream.available()];
//            fsDataInputStream.read(bytes);
//        }catch (IOException e){
//            e.printStackTrace();
//        }
//        shpAll.setShpRawBytes(bytes);
//        return shpAll;
//    }
//
//
//
//    @Override
//    public List<ShpAll> query(Map<String, Object> parameters) {
//        return null;
//    }
//
//
//
//    @Override
//    public List<ShpAll> queryByType(String shpType) {
//        return null;
//    }
//
//    @Override
//    public List<ShpAll> queryByAppendDate(TimeValue begin, TimeValue end) {
//        return null;
//    }
//
//    @Override
//    public boolean insert(ShpAll shpAll) {
//        ShpAllMapper shpAllMapper = session.getMapper(ShpAllMapper.class);
//        if (shpAll.getShpInternalId() == null){
//            //shpAll.setShpInternalId(shpAllMapper.nextShpInternalId());
//        }
//        byte[] bytes = shpAll.getShpRawBytes();
//        String docUrl = shpAll.getShpUrl();
//        String[] ss = docUrl.split("/");
//        String remoteDir ;
//        StringBuilder stringBuilder = new StringBuilder();
//        for (int i = 3; i < ss.length-1; i++) {
//            stringBuilder.append("/");
//            stringBuilder.append(ss[i]);
//        }
//        remoteDir = stringBuilder.toString();
//        try{
//            FileSystem fileSystem = FileSystem.get(URI.create(url), conf);
//            InputStream inputStream = new ByteArrayInputStream(bytes);
//            OutputStream outputStream = fileSystem.create(new Path(remoteDir));
//            IOUtils.copyBytes(inputStream,outputStream,conf);
//        }catch (IOException e){
//            e.printStackTrace();
//        }
//        ShpItem shpItem = ShpUtils.toShpItem(shpAll);
//        ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
//        shpItemMapper.insert(shpItem);
//        session.commit();
//        return true;
//    }
//
//    /**
//     * 读取本地目录中的所有图形文档，将其插入图形库中
//     *
//     * @param localDir 本地目录
//     * @return 成功返回true, 否则返回false
//     */
//    @Override
//    public boolean insert(String localDir) {
//        return false;
//    }
//
//    /**
//     * 根据shpInternalId删除数据库中对应的图形文档记录
//     *
//     * @param shpItem 必须填充shpInternalId
//     * @return
//     */
//    @Override
//    public boolean delete(ShpItem shpItem) {
//        return false;
//    }
//
//    /**
//     * 根据shpInternalId删除数据库中对应的图形文档记录
//     *
//     * @param shpAll 必须填充shpInternalId
//     * @return
//     */
//    @Override
//    public boolean delete(ShpAll shpAll) {
//        return false;
//    }
//
//    /**
//     * 根据shpItem+shpFile更新数据库中的图形文档记录
//     *
//     * @param shpItem 该对象的shpInternalId必须填充
//     * @param shpFile 更新文档文件的内容
//     * @return 更新成功返回true, 否则返回false
//     */
//    @Override
//    public boolean update(ShpItem shpItem, ShpFile shpFile) {
//        return false;
//    }
//
//    /**
//     * 根据根据shpAll中的信息更新数据库中的记录
//     *
//     * @param shpAll 该对象的shpInternalId必须填充
//     * @return 更新成功返回true, 否则返回false
//     */
//    @Override
//    public boolean update(ShpAll shpAll) {
//        return false;
//    }
//
////    @Override
////    public boolean insert(String localDir, DocManager docManager) {
////        File file = new File(localDir);
////        if(file.isDirectory()){
////            File[] files = file.listFiles();
////            for (File f : files){
////                if (f.getName().endsWith("shpbakeup") || f.getName().endsWith("shx") || f.getName().endsWith("dbf")) {
////                    ShpAll shpAll = new ShpAll();
////                    //shpAll.setShpInternalId(new ShpAllMapper().nextShpInternalId());
////                    shpAll.setShpCreateDate(new Date());
////                    String[] ss = localDir.split("/");
////                    shpAll.setShpUrl("ftp://124.71.9.22:21/" + ss[ss.length - 1]);
////                    shpAll.setShpCnTitle(f.getName());
////                    FileInputStream fileInputStream;
////                    ByteArrayOutputStream out;
////                    try {
////                        fileInputStream = new FileInputStream(f);
////                        out = new ByteArrayOutputStream(1000);
////                        byte[] b = new byte[1000];
////                        int n;
////                        while ((n = fileInputStream.read(b)) != -1) {
////                            out.write(b, 0, n);
////                        }
////                        shpAll.setShpRawBytes(b);
////                    } catch (FileNotFoundException e) {
////                        e.printStackTrace();
////                    } catch (IOException e) {
////                        e.printStackTrace();
////                    }
////                    insert(shpAll);
////                }
////            }
////        }
////        return true;
//////    }
//
//    /**
//     * 查询得到所有ShapeBlock的边界矩形
//     * @return
//     */
//    @Override
//    public  List<Pair<BigDecimal, Envelope>> queryAllEnvelopes(){
//        try {
//            ShpItemMapper shpItemMapper = session.getMapper(ShpItemMapper.class);
//            List<LinkedHashMap<String, Object>> ls = shpItemMapper.selectAllInternalIdAndExtents();
//            System.out.println(ls.size());
//            List<Pair<BigDecimal, Envelope>> r = new ArrayList<>();
//            for (LinkedHashMap<String, Object> l: ls){
//                BigDecimal id = (BigDecimal)l.get("SHP_INTERNAL_ID");
//                String coords = (String)l.get("SHP_EXTENT");
//                String [] cs = coords.split(FileDataSplitter.COMMA.getDelimiter());
//                double minX = Double.valueOf(cs[0]).doubleValue();
//                double maxX = Double.valueOf(cs[1]).doubleValue();
//                double minY = Double.valueOf(cs[2]).doubleValue();
//                double maxY = Double.valueOf(cs[3]).doubleValue();
//                Envelope envelope = new Envelope(minX,maxX,minY,maxY);
//                r.add(new Pair<>(id,envelope));
//            }
//            return r;
//        }
//        catch (Exception e){
//            e.printStackTrace();
//        }
//        return null;
//    }
//
//    /**
//     * 获取图形块ShapeBlock级别的查询器
//     *
//     * @return
//     */
//    @Override
//    public BlockQuerier getBlockQuerier() {
//        return new DefaultBlockQuerier(this);
//    }
//
//    /**
//     * 获取图形块ShapeBlock级别的更新器
//     * @return
//     */
//    @Override
//    public BlockUpdater getBlockUpdater() {
//        return new DefaultBlockUpdater(this);
//    }
}
