package cn.piesat.scanning.business.dq1043.algorithm;

import cn.piesat.scanning.business.common.algorithm.dataScan.DataScanAbstract;
import cn.piesat.scanning.business.common.algorithm.dataScan.DataScanParams;
import cn.piesat.scanning.business.dq1043.vo.ExtentVO;
import cn.piesat.scanning.business.dq1043.vo.MetaDataInDbVO;
import cn.piesat.scanning.dto.*;
import cn.piesat.scanning.service.DbDmsAlgorithmResultStoreService;
import cn.piesat.scanning.service.DbDmsSchedulerTaskFileRecordService;
import cn.piesat.scanning.service.DbDmsValidationService;
import cn.piesat.scanning.service.HtScanningMetadataService;
import cn.piesat.scanning.utils.RedisUtil;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKTReader;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import redis.clients.jedis.Jedis;

import java.io.File;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;

/**
 * Data-ingestion scan plugin for the DQ1043 business line.
 *
 * <p>Responsibilities visible in this class: assembling metadata/validation DTOs from scanned
 * files, draining previously deferred records from Redis ("WaitList:" hash, field "1043"),
 * consuming metadata messages from Kafka, reconciling replayed time slots (delete + re-insert),
 * and batch-persisting the resulting metadata records.</p>
 *
 * @author xhl
 */
@Component
public class DataScanAlgorithm extends DataScanAbstract {

    private static final Logger logger = LoggerFactory.getLogger(DataScanAlgorithm.class);

    /** Page size used when splitting metadata lists into batch inserts. */
    @Value("${scanning.data.batch.pagesize:1000}")
    private int batchPageSize;
    @Autowired
    private KafkaConsumer<String, String> kafkaConsumer;
    @Autowired
    private HtScanningMetadataService htScanningMetadataService;
    @Autowired
    private DbDmsSchedulerTaskFileRecordService dbDmsSchedulerTaskFileRecordService;
    @Autowired
    private DbDmsAlgorithmResultStoreService dbDmsAlgorithmResultStoreService;
    @Autowired
    private DbDmsValidationService dbDmsValidationService;

    /**
     * Parses satellite/sensor/level/date fields out of a validation file name (split on "_")
     * and writes them into the DTO. Only FY4A and H08 naming schemes are handled; other
     * names leave the DTO fields untouched.
     *
     * @param fileName validation file name
     * @param dto      target DTO, mutated in place
     */
    private void splitValidationFileName(String fileName, DbDmsScanningValidationDTO dto) {
        String[] nameSplit = fileName.split("_");
        if (fileName.contains("FY4A")) {
            //FY4A-_AGRI--_N_DISK_1047E_L1-_FDI-_MULT_NOM_20190520040000_20190520041459_4000M_V0001.HDF
            //  0     1    2    3   4    5   6    7    8      9              10          11     12
            dto.setSatellite(nameSplit[0].replace("-", ""));
            dto.setSensor(nameSplit[1].replace("-", ""));
            dto.setResolution(nameSplit[11].replace("M", ""));
            dto.setDataLevel(nameSplit[5].replace("-", ""));
            dto.setFileDate(nameSplit[9].replace("-", ""));
            dto.setProductIdentify(nameSplit[6].replace("-", ""));
        }
        if (fileName.contains("H08")) {
            //NC_H08_20200817_0620_L2CLP010_FLDK.02401_02401.nc
            // 0  1    2        3   4
            dto.setSatellite(nameSplit[1]);
            // Date is yyyyMMdd + HHmm, padded to seconds.
            dto.setFileDate(nameSplit[2] + nameSplit[3] + "00");
            String dataLevel = nameSplit[4].substring(0, 2);
            dto.setDataLevel(dataLevel);
            String productIdentify = nameSplit[4].substring(2, 5);
            dto.setProductIdentify(productIdentify);
        }
    }

    /**
     * Assembles an auxiliary-data DTO for a scanned file and appends it to {@code pojoList}.
     * Only the "T799" auxiliary type is handled; files already present in
     * {@code existFileNameSet} are skipped.
     *
     * @param pojoList         output collection of assembled DTOs
     * @param pathDTO          scan-path configuration (its path prefix is stripped from file paths)
     * @param file             the scanned file
     * @param existFileNameSet names already ingested, used for de-duplication
     * @param auxType          auxiliary data type discriminator
     */
    @Override
    public void assembleAux(List<Object> pojoList, HtPathConfigurationDTO pathDTO, File file,
                            Set<String> existFileNameSet, String auxType) {
        if (auxType.equals("T799")) {
            DbDmsScanningMetadataAuxInfoDTO auxInfoDTO = new DbDmsScanningMetadataAuxInfoDTO();
            String fileName = file.getName();
            if (existFileNameSet.contains(fileName)) {
                return;
            }
            // Properties files are read as ISO-8859-1 by ResourceBundle (pre-Java 9 behavior);
            // re-decode the value as UTF-8. StandardCharsets constants never throw, so the
            // old UnsupportedEncodingException catch is unnecessary.
            ResourceBundle resourceBundle = ResourceBundle.getBundle("meteorologicalElement");
            Set<String> keys = resourceBundle.keySet();
            for (String key : keys) {
                if (fileName.contains(key)) {
                    auxInfoDTO.setFileWant(key);
                    auxInfoDTO.setFileWantRemark(new String(
                            resourceBundle.getString(key).getBytes(StandardCharsets.ISO_8859_1),
                            StandardCharsets.UTF_8));
                    break;
                }
            }
            String[] split = fileName.split("\\.");
            auxInfoDTO.setFileTimeSecond(Integer.valueOf(split[1]));
            // Last 10 chars of the name stem are assumed to be yyyyMMddHH — TODO confirm format.
            auxInfoDTO.setFileDate(split[0].substring(split[0].length() - 10) + "0000");
            String filePath = file.toString().substring(pathDTO.getScanningPath().length());
            auxInfoDTO.setFileName(fileName);
            auxInfoDTO.setFilePath(filePath);
            auxInfoDTO.setFileSize(file.length());
            pojoList.add(auxInfoDTO);
        }
    }

    /**
     * Assembles either a validation DTO (data source == VALIDATION, ".nc" files only) or a
     * scanning-metadata DTO for a scanned file and appends it to {@code pojoList}. Files
     * already present in {@code existFileNameSet} are skipped.
     */
    @Override
    public void assembleDist(List<Object> pojoList, HtPathConfigurationDTO pathDTO, File file, Set<String> existFileNameSet, DataScanParams dataScanParams) {
        if (dataScanParams.getDataSource().equals(VALIDATION)) {
            DbDmsScanningValidationDTO dto = new DbDmsScanningValidationDTO();
            String fileName = file.getName();
            String path = file.toString().substring(pathDTO.getScanningPath().length());
            if (existFileNameSet.contains(fileName)) {
                return;
            }
            splitValidationFileName(fileName, dto);
            dto.setFilePath(path);
            dto.setFileName(fileName);
            dto.setFileSize(file.length());
            dto.setPathConfigId(pathDTO.getId());
            String fileType = fileName.substring(fileName.lastIndexOf(".") + 1).toUpperCase();
            // Validation data source only ingests NetCDF files.
            if (!fileType.equals("NC")) {
                return;
            }
            dto.setFileType(fileType);
            dto.setCreateTime(new Date());
            pojoList.add(dto);
        } else {
            HtScanningMetadataDTO metadataDTO = new HtScanningMetadataDTO();
            String fileName = file.getName();
            String path = file.toString().substring(pathDTO.getScanningPath().length());
            if (existFileNameSet.contains(fileName)) {
                return;
            }
            splitFileName(fileName, metadataDTO);
            metadataDTO.setFilePath(path);
            metadataDTO.setFileName(fileName);
            metadataDTO.setFileSize(file.length());
            metadataDTO.setPathConfigId(pathDTO.getId());
            metadataDTO.setFileType(fileName.substring(fileName.lastIndexOf(".") + 1).toUpperCase());
            metadataDTO.setCreateTime(new Date());
            pojoList.add(metadataDTO);
        }
    }

    /**
     * Drains previously deferred records from Redis ("WaitList:" / "1043") and ingests the
     * time slots that are no longer being processed.
     *
     * <p>Flow: read the wait list; for each distinct data time, check the task-record table
     * for in-flight work (file name LIKE the "yyyyMMdd_HHmmss"-shaped slot); slots with no
     * in-flight work are purged from the source/record/result/validation tables and then
     * re-ingested; still-running slots are written back to Redis.</p>
     *
     * @param taskId  current scheduler task id
     * @param pathDTO scan-path configuration used to resolve absolute file paths
     */
    public void saveRedisData(String taskId, HtPathConfigurationDTO pathDTO) {
        Map<String, MetaDataInDbVO> inputMap = new HashMap<>();
        Jedis jedis = RedisUtil.getJedis();
        try {
            String result = jedis.hget("WaitList:", "1043");
            if (StringUtils.isNotBlank(result) && !("[]").equals(result)) {
                List<MetaDataInDbVO> list = JSON.parseArray(result, MetaDataInDbVO.class);

                // Decide per time slot whether it is safe to purge and re-ingest.
                List<String> deleteFileDates = new ArrayList<>();
                List<String> deleteFileDateBeis = new ArrayList<>();
                List<String> fileDates = list.stream().map(p -> p.getDataTime()).distinct().collect(Collectors.toList());
                for (String fileDate : fileDates) {
                    // Insert "_" after the date part: "yyyyMMddHHmmss" -> "yyyyMMdd_HHmmss",
                    // matching how slots appear inside file names.
                    StringBuilder sb = new StringBuilder(fileDate);
                    sb.insert(8, "_");
                    String fileDateBei = sb.toString();
                    List<DbDmsSchedulerTaskFileRecordDTO> recordList =
                            dbDmsSchedulerTaskFileRecordService.findByFileNameLikeAndStateLessThan(fileDateBei, 2);
                    // FIX: null must be checked before size(); the original order could NPE
                    // and made the null check unreachable.
                    if (recordList == null || recordList.isEmpty()) {
                        deleteFileDates.add(fileDate);
                        deleteFileDateBeis.add(fileDateBei);
                    }
                }
                // Purge idle slots from the task-record table.
                for (String fileDateBei : deleteFileDateBeis) {
                    dbDmsSchedulerTaskFileRecordService.deleteByFileNameLikeAndTaskPlanIdNotNull(fileDateBei);
                }
                // Additionally delete L0 records whose file names carry no date
                // (e.g. JB20-1_TIR-1_000006094_001.AUX) by explicit name list.
                List<String> l0FileNameList = htScanningMetadataService.findByFileDateInAndDataLevel(deleteFileDates, "L0")
                        .stream().map(p -> p.getFileName()).distinct().collect(Collectors.toList());
                dbDmsSchedulerTaskFileRecordService.deleteByFileNameIn(l0FileNameList);

                htScanningMetadataService.deleteByFileDateIn(deleteFileDates);
                dbDmsAlgorithmResultStoreService.deleteByFileDateIn(deleteFileDates);
                dbDmsValidationService.deleteByFileDateIn(deleteFileDates);
                // Slots still executing go back to Redis; purged slots are (re-)ingested.
                List<MetaDataInDbVO> executeList = list.stream().filter(p -> !deleteFileDates.contains(p.getDataTime())).collect(Collectors.toList());
                List<MetaDataInDbVO> inputList = list.stream().filter(p -> deleteFileDates.contains(p.getDataTime())).collect(Collectors.toList());

                jedis.hset("WaitList:", "1043", JSON.toJSON(executeList).toString());
                for (MetaDataInDbVO vo : inputList) {
                    File file = new File(pathDTO.getScanningPath() + File.separator + vo.getFilePath());
                    String fileName = file.getName();
                    inputMap.put(fileName, vo);
                }
                List<String> inputNameList = new ArrayList<>(inputMap.keySet());
                // Create records in the task-record table.
                dbDmsSchedulerTaskFileRecordService.prepareDataByFileNameList(taskId, inputNameList, inputMap);

                List<HtScanningMetadataDTO> metadataDtos = inputNameList.parallelStream().map(fileName -> {
                    MetaDataInDbVO dataVO = inputMap.get(fileName);
                    return kafkaDataTransMetadata(dataVO, pathDTO);
                }).collect(Collectors.toList());

                persistMetadataBatches(metadataDtos, taskId);
            }
        } catch (Exception e) {
            // FIX: pass the throwable so the stack trace is logged, not just the message.
            logger.error("redis解析入库失败{}", e.getMessage(), e);
        } finally {
            jedis.close();
        }
    }

    /**
     * Persists metadata records in pages of {@link #batchPageSize} on a temporary thread pool
     * and waits for all batches to finish before returning.
     *
     * <p>FIX: the original fire-and-forget submission allowed {@code kafkaSave} to commit
     * Kafka offsets before the inserts completed, risking data loss on crash; waiting here
     * closes that window. Shared by {@link #saveRedisData} and {@link #kafkaSave}.</p>
     */
    private void persistMetadataBatches(List<HtScanningMetadataDTO> metadataDtos, String taskId) {
        ExecutorService threadPool = Executors.newFixedThreadPool(30);
        try {
            List<CompletableFuture<Void>> futures = Lists.partition(metadataDtos, batchPageSize)
                    .stream()
                    .map(subList -> CompletableFuture.runAsync(
                            () -> htScanningMetadataService.addEntityBatch(subList, taskId), threadPool))
                    .collect(Collectors.toList());
            CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
        } finally {
            threadPool.shutdown();
        }
    }

    /**
     * Main ingestion entry point: flushes deferred Redis data, then polls Kafka for metadata
     * messages, reconciles replayed time slots (defer to Redis if still executing, otherwise
     * purge and re-ingest), persists the new metadata and finally commits Kafka offsets.
     *
     * @param taskId  current scheduler task id
     * @param pathDTO scan-path configuration used to resolve absolute file paths
     */
    @Override
    public void kafkaSave(String taskId, HtPathConfigurationDTO pathDTO) {
        // First ingest anything previously deferred to Redis.
        saveRedisData(taskId, pathDTO);
        // Then pull fresh records from Kafka.
        ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100000));
        Map<String, MetaDataInDbVO> distinctRecordMap = new HashMap<>();
        records.forEach(record -> {
            String value = record.value();
            MetaDataInDbVO metaDataInDbVO = null;
            try {
                metaDataInDbVO = JSON.parseObject(value, MetaDataInDbVO.class);
            } catch (Exception e) {
                logger.error("Kafka数据格式化异常{},{}", e.getMessage(), value);
            }
            if (metaDataInDbVO != null) {
                List<MetaDataInDbVO> dataList = new ArrayList<>();
                if ("L0".equals(metaDataInDbVO.getDataLevel())) {
                    // An L0 message points at a directory: fan out one VO per contained file.
                    File dir = new File(pathDTO.getScanningPath() + File.separator + metaDataInDbVO.getFilePath());
                    if (dir.exists() && dir.isDirectory()) {
                        File[] l0Files = dir.listFiles();
                        // FIX: listFiles() can return null (e.g. dir removed after the
                        // exists() check or an I/O error); guard against NPE.
                        if (l0Files != null) {
                            for (File f : l0Files) {
                                MetaDataInDbVO l0DataVO = copyVO(metaDataInDbVO);
                                l0DataVO.setFilePath(l0DataVO.getFilePath() + File.separator + f.getName());
                                dataList.add(l0DataVO);
                            }
                        }
                    }
                } else {
                    dataList.add(metaDataInDbVO);
                }
                // De-duplicate by file name; later records win.
                for (MetaDataInDbVO vo : dataList) {
                    File file = new File(pathDTO.getScanningPath() + File.separator + vo.getFilePath());
                    String fileName = file.getName();
                    distinctRecordMap.put(fileName, vo);
                }
            }
        });
        List<String> execFileNameList = new ArrayList<>(distinctRecordMap.keySet());
        // FIX: the list is never null here; the original "size()>0 && !=null" order was a
        // dead null check.
        if (!execFileNameList.isEmpty()) {
            // Records with status 2 for this task are replays of already-ingested files.
            List<DbDmsSchedulerTaskFileRecordDTO> existList = dbDmsSchedulerTaskFileRecordService.findByFileNameList(execFileNameList, taskId);
            List<String> existListFileName = existList.stream().map(p -> p.getFileName()).collect(Collectors.toList());
            List<String> fileDates = htScanningMetadataService.findByFileNameIn(existListFileName)
                    .stream()
                    .map(p -> p.getFileDate())
                    .distinct()
                    .collect(Collectors.toList());

            // Partition replayed slots: still executing -> defer to Redis; idle -> purge.
            List<String> saveResisFileName = new ArrayList<>();
            List<String> deleteFileDates = new ArrayList<>();
            List<String> deleteFileDateBeis = new ArrayList<>();
            for (String fileDate : fileDates) {
                StringBuilder sb = new StringBuilder(fileDate);
                sb.insert(8, "_");
                String fileDateBei = sb.toString();
                List<DbDmsSchedulerTaskFileRecordDTO> recordList =
                        dbDmsSchedulerTaskFileRecordService.findByFileNameLikeAndStateLessThan(fileDateBei, 2);
                // FIX: null-check before dereferencing size().
                if (recordList != null && !recordList.isEmpty()) {
                    List<String> fileNameList = recordList.stream().map(p -> p.getFileName()).collect(Collectors.toList());
                    saveResisFileName.addAll(fileNameList);
                    deleteFileDates.add(fileDate);
                    execFileNameList.removeAll(saveResisFileName);
                } else {
                    deleteFileDateBeis.add(fileDateBei);
                }
            }
            fileDates.removeAll(deleteFileDates);

            List<MetaDataInDbVO> saveRedisList = new ArrayList<>();
            for (String name : saveResisFileName) {
                MetaDataInDbVO a = distinctRecordMap.get(name);
                if (a != null) {
                    saveRedisList.add(a);
                }
            }
            // Append deferred VOs to the Redis wait list.
            Jedis jedis1 = RedisUtil.getJedis();
            try {
                String res = jedis1.hget("WaitList:", "1043");
                List<MetaDataInDbVO> redisList = new ArrayList<>();
                if (StringUtils.isNotBlank(res)) {
                    redisList = JSON.parseArray(res, MetaDataInDbVO.class);
                }
                redisList.addAll(saveRedisList);
                jedis1.hset("WaitList:", "1043", JSON.toJSON(redisList).toString());
            } catch (Exception e) {
                // FIX: pass the throwable so the stack trace is logged.
                logger.error("redis解析入库失败{}", e.getMessage(), e);
            } finally {
                jedis1.close();
            }

            // Purge idle replayed slots from source/record/result/validation tables.
            for (String fileDateBei : deleteFileDateBeis) {
                dbDmsSchedulerTaskFileRecordService.deleteByFileNameLikeAndTaskPlanIdNotNull(fileDateBei);
            }
            // Additionally delete L0 records whose file names carry no date
            // (e.g. JB20-1_TIR-1_000006094_001.AUX) by explicit name list.
            List<String> l0FileNameList = htScanningMetadataService.findByFileDateInAndDataLevel(fileDates, "L0")
                    .stream().map(p -> p.getFileName()).distinct().collect(Collectors.toList());
            dbDmsSchedulerTaskFileRecordService.deleteByFileNameIn(l0FileNameList);

            htScanningMetadataService.deleteByFileDateIn(fileDates);
            dbDmsAlgorithmResultStoreService.deleteByFileDateIn(fileDates);
            dbDmsValidationService.deleteByFileDateIn(fileDates);
            // Create records in the task-record table.
            dbDmsSchedulerTaskFileRecordService.prepareDataByFileNameList(taskId, execFileNameList, distinctRecordMap);

            List<HtScanningMetadataDTO> metadataDtos = execFileNameList.parallelStream().map(fileName -> {
                MetaDataInDbVO dataVO = distinctRecordMap.get(fileName);
                return kafkaDataTransMetadata(dataVO, pathDTO);
            }).collect(Collectors.toList());

            // Waits for all batches, so offsets below are only committed after persistence.
            persistMetadataBatches(metadataDtos, taskId);
        }

        if (records.count() > 0) {
            kafkaConsumer.commitAsync();
        }
    }

    /**
     * 获取文件名称中的 卫星、传感器、分辨率、等级、日期等信息
     * (Extracts satellite, sensor, resolution, level and date from the file name.)
     * PS: each satellite has its own naming scheme; when a new data source is added,
     * a corresponding file-name split branch must be added here.
     *
     * @param fileName  file name
     * @param entityDto target entity, mutated in place
     */
    private void splitFileName(String fileName, HtScanningMetadataDTO entityDto) {
        String[] nameSplit = fileName.split("_");
        if (fileName.contains("FY4A")) {
            //FY4A-_AGRI--_N_DISK_1047E_L1-_FDI-_MULT_NOM_20190520040000_20190520041459_4000M_V0001.HDF
            //  0     1    2    3   4    5   6    7    8      9              10          11     12
            entityDto.setSatellite(nameSplit[0].replace("-", ""));
            entityDto.setSensor(nameSplit[1].replace("-", ""));
            entityDto.setResolution(nameSplit[11].replace("M", ""));
            entityDto.setDataLevel(nameSplit[5].replace("-", ""));
            entityDto.setFileDate(nameSplit[9]);
            entityDto.setRegion(nameSplit[3]);
        }
        if (fileName.contains("FY3D") && fileName.contains("MERSI")) {
            //FY3D_MERSI_GBAL_L1_20190828_0640_0250M_MS.HDF
            //  0     1    2  3   4         5   6    7
            entityDto.setSatellite(nameSplit[0]);
            entityDto.setSensor(nameSplit[1]);
            entityDto.setResolution(nameSplit[6].replace("M", ""));
            entityDto.setDataLevel(nameSplit[3]);
            entityDto.setFileDate(nameSplit[4] + nameSplit[5]);
        }
        if (fileName.contains("FY3D") && fileName.contains("MWRIA")) {
            //FY3D_MWRIA_ORBT_L2_CLW_MLT_NUL_20190901_0127_025KM_MS.HDF
            //  0     1    2  3   4  5   6    7         8   9     10
            entityDto.setSatellite(nameSplit[0]);
            entityDto.setSensor(nameSplit[1]);
            entityDto.setResolution(nameSplit[9].replace("M", ""));
            entityDto.setDataLevel(nameSplit[3]);
            entityDto.setFileDate(nameSplit[7] + nameSplit[8]);
        }
        if (fileName.contains("H08")) {
            //HS_H08_FLDK_201803060400.hdf
            // 0  1    2    3
            entityDto.setSatellite(nameSplit[1]);
            entityDto.setFileDate(nameSplit[3]);
        }
        if (fileName.contains("JB20-1")) {
            if (fileName.contains("GEO")) {
                //JB20-1_TIR_20200817_005000_B12_E19.60_S49.05_M01_GEO_L1B.HDF
                entityDto.setLat(nameSplit[6]);
                entityDto.setLon(nameSplit[5]);
                entityDto.setSensor(nameSplit[1]);
                entityDto.setBands(nameSplit[4]);
                entityDto.setDataLevel(nameSplit[9].substring(0, nameSplit[9].indexOf(".")));
                entityDto.setFileDate(nameSplit[2] + nameSplit[3]);
                entityDto.setSatellite(nameSplit[0]);
                entityDto.setFileType(nameSplit[9].substring(nameSplit[9].indexOf(".") + 1));
            } else {
                //JB20-1_TIR_20200817_011000_B12_E1.72_N18.77_M01_L1A.HDF
                entityDto.setLat(nameSplit[6]);
                entityDto.setLon(nameSplit[5]);
                entityDto.setSensor(nameSplit[1]);
                entityDto.setBands(nameSplit[4]);
                entityDto.setDataLevel(nameSplit[8].substring(0, nameSplit[8].indexOf(".")));
                entityDto.setFileDate(nameSplit[2] + nameSplit[3]);
                entityDto.setSatellite(nameSplit[0]);
                entityDto.setFileType(nameSplit[8].substring(nameSplit[8].indexOf(".") + 1));
            }
        }
    }

    /**
     * Shallow field-by-field copy of a {@link MetaDataInDbVO}; used to fan out one VO per
     * file of an L0 directory without sharing mutable state.
     */
    private MetaDataInDbVO copyVO(MetaDataInDbVO sourceVO) {
        MetaDataInDbVO resultVO = new MetaDataInDbVO();
        resultVO.setTaskPlanId(sourceVO.getTaskPlanId());
        resultVO.setSubPlanId(sourceVO.getSubPlanId());
        resultVO.setStation(sourceVO.getStation());
        resultVO.setOrbitId(sourceVO.getOrbitId());
        resultVO.setObsMode(sourceVO.getObsMode());
        resultVO.setViewMode(sourceVO.getViewMode());
        resultVO.setFilePath(sourceVO.getFilePath());
        resultVO.setDataTime(sourceVO.getDataTime());
        resultVO.setSatellite(sourceVO.getSatellite());
        resultVO.setSensor(sourceVO.getSensor());
        resultVO.setResolution(sourceVO.getResolution());
        resultVO.setDataLevel(sourceVO.getDataLevel());
        resultVO.setProductId(sourceVO.getProductId());
        resultVO.setProductMode(sourceVO.getProductMode());
        resultVO.setCoordinate(sourceVO.getCoordinate());
        resultVO.setComRatio(sourceVO.getComRatio());
        resultVO.setBands(sourceVO.getBands());
        resultVO.setExtent(sourceVO.getExtent());
        return resultVO;
    }

    /**
     * Converts a Kafka message VO into the source-metadata entity format.
     *
     * @param dataVO  VO deserialized from a Kafka message
     * @param pathDTO scan-path configuration used to resolve the absolute file path
     * @return populated metadata DTO
     */
    private HtScanningMetadataDTO kafkaDataTransMetadata(MetaDataInDbVO dataVO, HtPathConfigurationDTO pathDTO) {
        HtScanningMetadataDTO metadataDTO = new HtScanningMetadataDTO();
        if (StringUtils.isNotBlank(dataVO.getId())) {
            metadataDTO.setId(dataVO.getId());
        }
        // The scene number is assumed to be the second-to-last path segment — TODO confirm.
        String[] pathSplit = dataVO.getFilePath().split("/");
        if (pathSplit.length >= 2) {
            // FIX: guard against paths with fewer than two segments (AIOOBE in the original).
            metadataDTO.setViewNumber(pathSplit[pathSplit.length - 2]);
        }
        File file = new File(pathDTO.getScanningPath() + File.separator + dataVO.getFilePath());
        String fileName = file.getName();
        if (!dataVO.getDataLevel().toUpperCase().equals("L0")) {
            // Center lon/lat are assumed at name positions 5 and 6 (JB20-1-style names);
            // FIX: bounds-guarded so shorter names no longer throw.
            String[] fileNameSplit = fileName.split("_");
            if (fileNameSplit.length > 6) {
                metadataDTO.setLon(fileNameSplit[5]);
                metadataDTO.setLat(fileNameSplit[6]);
            }
        }
        metadataDTO.setSatellite(dataVO.getSatellite());
        metadataDTO.setSensor(dataVO.getSensor());
        metadataDTO.setResolution(dataVO.getResolution());
        metadataDTO.setDataLevel(dataVO.getDataLevel());
        metadataDTO.setFilePath(dataVO.getFilePath().replaceAll("/+", "/"));
        metadataDTO.setFileDate(dataVO.getDataTime());
        metadataDTO.setFileSize(file.length());
        metadataDTO.setFileName(fileName);
        if (fileName.lastIndexOf(".") != -1) {
            metadataDTO.setFileType(fileName.substring(fileName.lastIndexOf(".") + 1).toUpperCase());
        }
        metadataDTO.setCreateTime(new Date());
        metadataDTO.setPathConfigId(pathDTO.getId());
        metadataDTO.setTaskPlanId(dataVO.getTaskPlanId());
        metadataDTO.setSubPlanId(dataVO.getSubPlanId());
        metadataDTO.setStation(dataVO.getStation());
        metadataDTO.setOrbitId(dataVO.getOrbitId());
        metadataDTO.setObsMode(dataVO.getObsMode());
        metadataDTO.setViewMode(dataVO.getViewMode());
        metadataDTO.setProductIid(dataVO.getProductId());
        metadataDTO.setProductMode(dataVO.getProductMode());
        metadataDTO.setCoordinate(dataVO.getCoordinate());
        metadataDTO.setComRatio(dataVO.getComRatio());
        metadataDTO.setBands(dataVO.getBands());
        ExtentVO extent = dataVO.getExtent();
        if (extent != null) {
            metadataDTO.setTopleftLat(extent.getTopleftLat());
            metadataDTO.setTopleftLon(extent.getTopleftLon());
            metadataDTO.setToprightLat(extent.getToprightLat());
            metadataDTO.setToprightLon(extent.getToprightLon());
            metadataDTO.setBottomrightLat(extent.getBottomrightLat());
            metadataDTO.setBottomrightLon(extent.getBottomrightLon());
            metadataDTO.setBottomleftLat(extent.getBottomleftLat());
            metadataDTO.setBottomleftLon(extent.getBottomleftLon());
            // WKT polygon closed back to the top-left corner (5 vertices).
            String polygon = new StringBuilder()
                    .append("Polygon((")
                    .append(extent.getTopleftLon()).append(" ").append(extent.getTopleftLat()).append(",")
                    .append(extent.getBottomleftLon()).append(" ").append(extent.getBottomleftLat()).append(",")
                    .append(extent.getBottomrightLon()).append(" ").append(extent.getBottomrightLat()).append(",")
                    .append(extent.getToprightLon()).append(" ").append(extent.getToprightLat()).append(",")
                    .append(extent.getTopleftLon()).append(" ").append(extent.getTopleftLat())
                    .append("))").toString();
            WKTReader wktReader = new WKTReader();
            Geometry geometry;
            try {
                geometry = wktReader.read(polygon);
            } catch (ParseException e) {
                // Best-effort: an unparseable extent leaves the geometry null.
                geometry = null;
            }
            metadataDTO.setAddressLoc(geometry);
        }
        return metadataDTO;
    }
}
