package cn.com.acca.ma.service.impl;

import cn.com.acca.ma.common.util.*;
import cn.com.acca.ma.enumeration.OracleColumnType;
import cn.com.acca.ma.enumeration.OracleObjectType;
import cn.com.acca.ma.enumeration.TableName;
import cn.com.acca.ma.model.*;
import cn.com.acca.ma.model.db.UserColumnComment;
import cn.com.acca.ma.model.db.UserIndexes;
import cn.com.acca.ma.model.db.UserTableColumns;
import cn.com.acca.ma.model.db.UserTableComment;
import cn.com.acca.ma.pojo.OracleDatabaseObjectDefinition;
import cn.com.acca.ma.service.ProjectService;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;

import javax.mail.*;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import java.io.*;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.CopyOnWriteArraySet;

public class ProjectServiceImpl extends BaseServiceImpl<ProjectServiceImpl, StockMovingAverage> implements ProjectService {

    /*****************************************************************************************************************
     *
     * 									设置代理服务器的IP和port
     *
     ******************************************************************************************************************/
    /**
     * Sets the HTTP proxy host/port system properties when the program runs on
     * the company network; does nothing when running at home.
     *
     * <p>The proxy host and port are read from the {@code proxyProperties}
     * element of the configuration XML at {@code STOCK_RECORD_XML}.
     */
    @Override
    @SuppressWarnings("rawtypes")
    public void setProxyProperties() {
        // At home there is no proxy, so nothing to configure.
        if (this.isHomeComputer()) {
            return;
        }

        logger.info("开始设置代理服务器的IP和端口......");

        // Proxy host (IP) and port, read from the configuration XML below.
        String proxyHost = null;
        String proxyPort = null;

        try {
            SAXReader saxReader = new SAXReader();
            Document document = saxReader.read(new File(STOCK_RECORD_XML));
            Element rootElt = document.getRootElement();
            for (Iterator it = rootElt.elementIterator(); it.hasNext(); ) {
                Element element = (Element) it.next();
                if (element.attribute("id").getValue().equals("proxyProperties")) {
                    for (Iterator iterator = element.elementIterator(); iterator.hasNext(); ) {
                        Element elt = (Element) iterator.next();
                        if (elt.attributeValue("key").equals("http.proxyHost")) {
                            proxyHost = elt.attributeValue("value");
                            logger.info("代理服务器的IP为：" + proxyHost);
                        }
                        if (elt.attributeValue("key").equals("http.proxyPort")) {
                            proxyPort = elt.attributeValue("value");
                            logger.info("代理服务器的端口为：" + proxyPort);
                        }
                    }
                }
            }
        } catch (DocumentException e) {
            e.printStackTrace();
        }

        // BUG FIX: Properties.setProperty(key, null) throws NullPointerException.
        // Only apply the proxy settings when both values were found in the XML.
        if (proxyHost == null || proxyPort == null) {
            logger.warn("未能从配置文件中读取到代理服务器的IP或端口，跳过代理设置");
            return;
        }
        System.getProperties().setProperty("socksProxySet", "true");
        System.getProperties().setProperty("http.proxyHost", proxyHost);
        System.getProperties().setProperty("http.proxyPort", proxyPort);
    }

    /**
     * Determines whether the program is running at home or on the company
     * machine by comparing the local IP against the company IP configured in
     * the XML at {@code STOCK_RECORD_XML}. Defaults to "home" when detection
     * fails.
     *
     * @return {@code true} when running at home (or on any error),
     *         {@code false} when the local IP matches the configured company IP
     */
    @SuppressWarnings("rawtypes")
    public boolean isHomeComputer() {
        logger.info("开始判断是在家中环境，还是公司环境......");

        // Company IP from configuration, and the IP of this machine.
        String companyIP = null;
        String currentIP = null;

        try {
            // Resolve the IP of the local machine.
            InetAddress localHost = InetAddress.getLocalHost();
            currentIP = localHost.getHostAddress().toString();
            logger.info("本地的IP为：" + currentIP);

            // Look up the configured company IP in the proxyProperties group.
            Document document = new SAXReader().read(new File(STOCK_RECORD_XML));
            for (Iterator groups = document.getRootElement().elementIterator(); groups.hasNext(); ) {
                Element group = (Element) groups.next();
                if (!group.attribute("id").getValue().equals("proxyProperties")) {
                    continue;
                }
                for (Iterator entries = group.elementIterator(); entries.hasNext(); ) {
                    Element entry = (Element) entries.next();
                    if (entry.attributeValue("key").equals("companyIP")) {
                        companyIP = entry.attributeValue("value");
                        logger.info("公司环境的IP为：" + companyIP);
                    }
                }
            }

            // Matching IP means we are on the company network.
            if (currentIP.equals(companyIP)) {
                logger.info("程序是在公司环境运行");
                return false;
            }
            logger.info("程序是在家中环境运行");
            return true;
        } catch (DocumentException e) {
            e.printStackTrace();
        } catch (UnknownHostException e1) {
            e1.printStackTrace();
        }
        logger.info("程序是在家中环境运行");
        return true;
    }

    /*****************************************************************************************************************
     *
     * 										收集数据库统计信息
     *
     * ****************************************************************************************************************/
    /**
     * Gathers database optimizer statistics via the project DAO.
     * (Historically this only ran on Fridays; it now runs unconditionally.)
     */
    public void gatherDatabaseStatistics() {
        logger.info("开始收集数据库的统计信息");
        projectDao.gatherDatabaseStatistics();
    }

    /*********************************************************************************************************************
     *
     * 									                          备份项目代码
     *
     *********************************************************************************************************************/
    /**
     * Backs up the project source tree to a date-stamped directory under
     * {@code CODE_BACKUP_DIR} and removes the copied {@code target} build
     * directory from the backup.
     */
    public void backupCode() {
        logger.info("开始备份项目代码");

        // Use a single timestamp so the directory name cannot straddle midnight
        // (the original called new Date() three times).
        Date now = new Date();
        String targetDir = CODE_BACKUP_DIR + "adam_" + DateUtil.getYearFromDate(now)
                + "_" + DateUtil.getMonthFromDate(now)
                + "_" + DateUtil.getDateFromDate(now);
        // mkdirs() also creates missing parent directories; plain mkdir() would
        // silently fail when a parent does not exist yet.
        new File(targetDir).mkdirs();
        try {
            FileUtils.copyDirectory(new File(DIR), new File(targetDir));
            // The compiled build output is not worth keeping in the backup.
            FileUtils.deleteDirectory(new File(targetDir + "/target"));

            logger.info("代码备份完成。备份路径为：" + targetDir);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*********************************************************************************************************************
     *
     * 									              导出、导入数据库对象
     *
     *********************************************************************************************************************/
    /**
     * Exports every supported database object kind (types, sequences, tables,
     * column/table comments, indexes, functions, procedures and packages) to
     * DDL files under {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     * The output directory is wiped and recreated before the export starts.
     */
    @Override
    public void exportDatabaseObject() {
        logger.info("导出数据库对象");

        // Wipe and recreate the output directory so stale files do not linger.
        File deleteFile = new File(DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY);
        FileUtil.deleteDirectory(deleteFile);
        deleteFile.mkdir();

        // Export TYPE specifications.
        this.exportType();

        // Export TYPE BODY definitions.
        this.exportTypeBody();

        // Export SEQUENCEs.
        this.exportSequence();

        // Export TABLE DDL.
        this.exportTable();

        // Export column comments.
        this.exportColumnComment();

        // Export table comments.
        this.exportTableComment();

        // Export INDEX DDL.
        this.exportIndex();

        // Export FUNCTIONs.
        this.exportFunction();

        // Export PROCEDUREs.
        this.exportProcedure();

        // Export PACKAGE specifications.
        this.exportPackage();

        // Export PACKAGE BODY definitions.
        this.exportPackageBody();
    }

    /**
     * Exports every TYPE specification to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}. When the fetched DDL
     * also contains the type body (a second CREATE OR REPLACE section), only
     * the specification is kept; the body is exported separately.
     */
    private void exportType() {
        try {
            List<String> typeNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.TYPE.getObjectType());
            if (null != typeNameList && typeNameList.size() > 0) {
                for (String typeName : typeNameList) {
                    String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.TYPE.getObjectTypeName(), typeName);
                    // BUG FIX: guard against a null definition; the original
                    // substring(0, 3) would also throw on definitions shorter than
                    // three characters and abort the remaining exports (the try
                    // wraps the whole loop). startsWith() is safe for any length.
                    if (null == definition) {
                        logger.warn("TYPE[" + typeName + "]没有定义");
                        continue;
                    }
                    // Strip the three leading unrecognized characters.
                    if (definition.startsWith("\n  ")) {
                        definition = definition.substring(3);
                    }

                    // Keep only the specification; drop everything from the second
                    // CREATE OR REPLACE onward (that part is the body).
                    int firstIndex = definition.indexOf("CREATE OR REPLACE");
                    int lastIndex = definition.lastIndexOf("CREATE OR REPLACE");
                    if (firstIndex != lastIndex) {
                        definition = definition.substring(0, lastIndex);
                    }

                    // Persist the definition.
                    if (StringUtils.isNotEmpty(definition)) {
                        String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + typeName + OracleObjectType.TYPE.getSuffix();
                        logger.info("保存文件：" + filePath);
                        FileUtil.saveFile(filePath, definition);
                    }
                }
            } else {
                logger.warn("没有TYPE的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports every TYPE BODY definition to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     */
    private void exportTypeBody() {
        try {
            List<String> typeBodyNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.TYPE_BODY.getObjectType());
            if (null != typeBodyNameList && typeBodyNameList.size() > 0) {
                for (String typeBodyName : typeBodyNameList) {
                    String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.TYPE_BODY.getObjectTypeName(), typeBodyName);
                    // BUG FIX: guard against null, and use startsWith() instead of
                    // substring(0, 3), which throws on definitions shorter than
                    // three characters and would abort the remaining exports.
                    if (null == definition) {
                        logger.warn("TYPE BODY[" + typeBodyName + "]没有定义");
                        continue;
                    }
                    // Strip the three leading unrecognized characters.
                    if (definition.startsWith("\n  ")) {
                        definition = definition.substring(3);
                    }

                    // Persist the definition.
                    if (StringUtils.isNotEmpty(definition)) {
                        String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + typeBodyName + OracleObjectType.TYPE_BODY.getSuffix();
                        logger.info("保存文件：" + filePath);
                        FileUtil.saveFile(filePath, definition);
                    }
                }
            } else {
                logger.warn("没有TYPE BODY的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Writes one "create sequence" script per sequence under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}, using a fixed
     * template (the actual current value is not preserved).
     */
    private void exportSequence() {
        try {
            List<String> sequenceNameList = projectDao.findAllSequenceName();
            if (null == sequenceNameList || sequenceNameList.isEmpty()) {
                logger.warn("没有SEQUENCE的定义");
                return;
            }
            for (String sequenceName : sequenceNameList) {
                // Fixed DDL template — every sequence restarts from 1.
                String ddl = "create sequence SCOTT." + sequenceName + "\n"
                        + "minvalue 1\n"
                        + "maxvalue 100000000000000000000000000\n"
                        + "start with 1\n"
                        + "increment by 1\n"
                        + "cache 20\n"
                        + "cycle\n"
                        + "order;\n";
                String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + sequenceName + OracleObjectType.SEQUENCE.getSuffix();
                logger.info("保存文件：" + filePath);
                FileUtil.saveFile(filePath, ddl);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports a CREATE TABLE statement per user table to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}. ID columns become
     * identity columns; NUMBER, NVARCHAR2, VARCHAR2, DATE, TIMESTAMP* and
     * CLOB* columns are handled. Columns of any other data type are silently
     * omitted from the generated DDL.
     */
    private void exportTable() {
        try {
            List<String> tableNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.TABLE.getObjectType());
            if (null != tableNameList && tableNameList.size() > 0) {
                for (String tableName : tableNameList) {
                    List<UserTableColumns> userTableColumnsList = userTableColumnDao.findByTableName(tableName);
                    if (null != userTableColumnsList && userTableColumnsList.size() > 0) {
                        // StringBuilder instead of StringBuffer: the buffer is
                        // method-local, so synchronization buys nothing.
                        StringBuilder ddl = new StringBuilder("create table SCOTT." + tableName + "\n(");

                        for (UserTableColumns column : userTableColumnsList) {
                            appendColumnDefinition(ddl, column);
                        }

                        // Drop the trailing ",\n" left by the last column.
                        // NOTE(review): assumes at least one column matched a known
                        // type; otherwise this would eat the opening parenthesis —
                        // same as the original behavior.
                        ddl.setLength(ddl.length() - 2);
                        ddl.append("\n");

                        ddl.append(")\n" +
                                "tablespace USERS\n" +
                                "  pctfree 10\n" +
                                "  initrans 1\n" +
                                "  maxtrans 255\n" +
                                "  storage\n" +
                                "  (\n" +
                                "    initial 1160M\n" +
                                "    next 1M\n" +
                                "    minextents 1\n" +
                                "    maxextents unlimited\n" +
                                "  )");

                        // Persist the definition.
                        if (StringUtils.isNotEmpty(ddl.toString())) {
                            String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + tableName + OracleObjectType.TABLE.getSuffix();
                            logger.info("保存文件：" + filePath);
                            FileUtil.saveFile(filePath, ddl.toString());
                        }
                    }
                }
            } else {
                logger.warn("没有TABLE的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Appends one column definition line (terminated by ",\n") for the given
     * column. ID/ID_ columns are emitted as identity columns; columns whose
     * data type matches none of the handled Oracle types append nothing.
     */
    private void appendColumnDefinition(StringBuilder ddl, UserTableColumns column) {
        String columnName = column.getKey().getColumnName();
        String dataType = column.getDataType();
        // Primary-key style columns become identity columns.
        if (columnName.toLowerCase().equals("id") || columnName.toLowerCase().equals("id_")) {
            ddl.append(columnName).append("\t").append("NUMBER generated by default as identity,\n");
            return;
        }
        if (dataType.equals(OracleColumnType.NUMBER.getName())) {
            ddl.append(columnName).append("\t").append("NUMBER");
            // Emit precision/scale only when both are present.
            if (null != column.getDataPrecision() && null != column.getDataScale()) {
                ddl.append("(").append(column.getDataPrecision()).append(",")
                        .append(column.getDataScale()).append("),\n");
            } else {
                ddl.append(",\n");
            }
        }
        if (dataType.equals(OracleColumnType.NVARCHAR2.getName())) {
            ddl.append(columnName).append("\t").append("NVARCHAR2(")
                    .append(column.getDataLength()).append("),\n");
        }
        if (dataType.equals(OracleColumnType.VARCHAR2.getName())) {
            ddl.append(columnName).append("\t").append("VARCHAR2(")
                    .append(column.getDataLength()).append("),\n");
        }
        if (dataType.equals(OracleColumnType.DATE.getName())) {
            ddl.append(columnName).append("\t").append("DATE,\n");
        }
        // TIMESTAMP(n) variants carry their precision in the type name itself.
        if (dataType.startsWith(OracleColumnType.TIMESTAMP.getName())) {
            ddl.append(columnName).append("\t").append(dataType + ",\n");
        }
        if (dataType.startsWith(OracleColumnType.CLOB.getName())) {
            ddl.append(columnName).append("\t").append(dataType + ",\n");
        }
    }

    /**
     * Writes one "comment on column" statement per commented column into a
     * single script file under {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     */
    private void exportColumnComment() {
        try {
            List<UserColumnComment> columnComments = userColumnCommentDao.findByCommentNotNull();
            if (null == columnComments || columnComments.isEmpty()) {
                logger.warn("没有列注释的定义");
                return;
            }
            StringBuilder script = new StringBuilder();
            for (UserColumnComment columnComment : columnComments) {
                script.append(String.format("comment on column SCOTT.%s.%s is '%s';\n",
                        columnComment.getKey().getTableName(),
                        columnComment.getKey().getColumnName(),
                        columnComment.getComments()));
            }
            String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + UserColumnComment.class.getSimpleName() + OracleObjectType.COLUMN_COMMENT.getSuffix();
            logger.info("保存文件：" + filePath);
            FileUtil.saveFile(filePath, script.toString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Writes one "comment on table" statement per commented table into a
     * single script file under {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     */
    private void exportTableComment() {
        try {
            List<UserTableComment> tableComments = userTableCommentDao.findByCommentNotNull();
            if (null == tableComments || tableComments.isEmpty()) {
                logger.warn("没有表注释的定义");
                return;
            }
            StringBuilder script = new StringBuilder();
            for (UserTableComment tableComment : tableComments) {
                script.append(String.format("comment on table SCOTT.%s is '%s';\n",
                        tableComment.getKey().getTableName(),
                        tableComment.getComments()));
            }
            String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + UserTableComment.class.getSimpleName() + OracleObjectType.TABLE_COMMENT.getSuffix();
            logger.info("保存文件：" + filePath);
            FileUtil.saveFile(filePath, script.toString());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports the DDL of every user index to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}, skipping the SYS_IL*
     * LOB indexes that Oracle maintains by itself.
     */
    private void exportIndex() {
        try {
            List<UserIndexes> indexes = userIndexesDao.findAll();
            if (null == indexes || indexes.isEmpty()) {
                logger.warn("没有INDEX的定义");
                return;
            }
            for (UserIndexes index : indexes) {
                String indexName = index.getIndexName();
                // SYS_IL* indexes are managed internally by Oracle — skip them.
                if (indexName.startsWith("SYS_IL")) {
                    logger.warn("索引[" + indexName + "]不导出");
                    continue;
                }
                String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.INDEX.getObjectTypeName(), indexName);
                String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + indexName + OracleObjectType.INDEX.getSuffix();
                logger.info("保存文件：" + filePath);
                FileUtil.saveFile(filePath, definition);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports every FUNCTION definition to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     */
    private void exportFunction() {
        try {
            List<String> functionNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.FUNCTION.getObjectType());
            if (null != functionNameList && functionNameList.size() > 0) {
                for (String functionName : functionNameList) {
                    String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.FUNCTION.getObjectTypeName(), functionName);
                    // BUG FIX: guard against null, and use startsWith() instead of
                    // substring(0, 3), which throws on definitions shorter than
                    // three characters and would abort the remaining exports.
                    if (null == definition) {
                        logger.warn("FUNCTION[" + functionName + "]没有定义");
                        continue;
                    }
                    // Strip the three leading unrecognized characters.
                    if (definition.startsWith("\n  ")) {
                        definition = definition.substring(3);
                    }
                    String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + functionName + OracleObjectType.FUNCTION.getSuffix();
                    logger.info("保存文件：" + filePath);
                    FileUtil.saveFile(filePath, definition);
                }
            } else {
                logger.warn("没有FUNCTION的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports every PROCEDURE definition to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     */
    private void exportProcedure() {
        try {
            List<String> procedureNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.PROCEDURE.getObjectType());
            if (null != procedureNameList && procedureNameList.size() > 0) {
                for (String procedureName : procedureNameList) {
                    String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.PROCEDURE.getObjectTypeName(), procedureName);
                    // BUG FIX: guard against null, and use startsWith() instead of
                    // substring(0, 3), which throws on definitions shorter than
                    // three characters and would abort the remaining exports.
                    if (null == definition) {
                        logger.warn("PROCEDURE[" + procedureName + "]没有定义");
                        continue;
                    }
                    // Strip the three leading unrecognized characters.
                    if (definition.startsWith("\n  ")) {
                        definition = definition.substring(3);
                    }
                    String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + procedureName + OracleObjectType.PROCEDURE.getSuffix();
                    logger.info("保存文件：" + filePath);
                    FileUtil.saveFile(filePath, definition);
                }
            } else {
                logger.warn("没有PROCEDURE的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports every PACKAGE specification to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}. When the fetched DDL
     * also contains the package body (a second CREATE OR REPLACE section),
     * only the specification is kept; the body is exported separately.
     */
    private void exportPackage() {
        try {
            List<String> packNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.PACKAGE.getObjectType());
            if (null != packNameList && packNameList.size() > 0) {
                for (String packageName : packNameList) {
                    String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.PACKAGE.getObjectTypeName(), packageName);
                    // BUG FIX: guard against null, and use startsWith() instead of
                    // substring(0, 3), which throws on definitions shorter than
                    // three characters and would abort the remaining exports.
                    if (null == definition) {
                        logger.warn("PACKAGE[" + packageName + "]没有定义");
                        continue;
                    }
                    // Strip the three leading unrecognized characters.
                    if (definition.startsWith("\n  ")) {
                        definition = definition.substring(3);
                    }

                    // Keep only the specification; drop everything from the second
                    // CREATE OR REPLACE onward (that part is the body).
                    int firstIndex = definition.indexOf("CREATE OR REPLACE");
                    int lastIndex = definition.lastIndexOf("CREATE OR REPLACE");
                    if (firstIndex != lastIndex) {
                        definition = definition.substring(0, lastIndex);
                    }

                    // Persist the definition.
                    if (StringUtils.isNotEmpty(definition)) {
                        String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + packageName + OracleObjectType.PACKAGE.getSuffix();
                        logger.info("保存文件：" + filePath);
                        FileUtil.saveFile(filePath, definition);
                    }
                }
            } else {
                logger.warn("没有PACKAGE的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Exports every PACKAGE BODY definition to a file under
     * {@code DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY}.
     */
    private void exportPackageBody() {
        try {
            List<String> packageBodyNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.PACKAGE_BODY.getObjectType());
            if (null != packageBodyNameList && packageBodyNameList.size() > 0) {
                for (String packageBodyName : packageBodyNameList) {
                    String definition = projectDao.findDefinitionByObjectTypeAndObjectName(OracleObjectType.PACKAGE_BODY.getObjectTypeName(), packageBodyName);
                    // BUG FIX: guard against null, and use startsWith() instead of
                    // substring(0, 3), which throws on definitions shorter than
                    // three characters and would abort the remaining exports.
                    if (null == definition) {
                        logger.warn("PACKAGE BODY[" + packageBodyName + "]没有定义");
                        continue;
                    }
                    // Strip the three leading unrecognized characters.
                    if (definition.startsWith("\n  ")) {
                        definition = definition.substring(3);
                    }

                    // Persist the definition.
                    if (StringUtils.isNotEmpty(definition)) {
                        String filePath = DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + packageBodyName + OracleObjectType.PACKAGE_BODY.getSuffix();
                        logger.info("保存文件：" + filePath);
                        FileUtil.saveFile(filePath, definition);
                    }
                }
            } else {
                logger.warn("没有PACKAGE BODY的定义");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Imports every DDL file previously produced by
     * {@link #exportDatabaseObject()}. Existing objects are dropped first and
     * the files are replayed in dependency order: types before tables, tables
     * before indexes and comments, and package specifications before bodies.
     * Files whose SQL fails are collected and logged at the end.
     */
    @Override
    public void importDatabaseObject() {
        logger.info("导入数据库对象");

        File dir = new File(DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY);
        if (dir.exists()) {
            // Oracle errors that are safe to ignore when re-running the import.
            List<String> excludeExceptionList = new ArrayList<>();
            excludeExceptionList.add("ORA-00955: 名称已由现有对象使用\n");
            excludeExceptionList.add("ORA-01408: 此列列表已索引\n");
            // Raised when dropping sequences.
            excludeExceptionList.add("ORA-32794: 无法删除系统生成的序列\n");
            // Raised when dropping/importing column comments.
            excludeExceptionList.add("ORA-00903: 表名无效\n");

            // Files whose SQL failed to execute.
            List<File> failFileList = new ArrayList<>();

            // Bucket the files by object kind, based on the file suffix.
            List<File> packageBodyFileList = new ArrayList<>();
            List<File> functionFileList = new ArrayList<>();
            List<File> procedureFileList = new ArrayList<>();
            List<File> sequenceFileList = new ArrayList<>();
            List<File> packageFileList = new ArrayList<>();
            List<File> tableFileList = new ArrayList<>();
            List<File> typeBodyFileList = new ArrayList<>();
            List<File> typeFileList = new ArrayList<>();
            List<File> indexFileList = new ArrayList<>();
            // NOTE(review): tableCommentFileList is populated but never imported
            // below — confirm whether table comments should be replayed too.
            List<File> tableCommentFileList = new ArrayList<>();
            List<File> columnCommentFileList = new ArrayList<>();
            // BUG FIX: listFiles() returns null on an I/O error; the original
            // passed that null to Arrays.asList() and threw a NullPointerException
            // before its own null check could run.
            File[] files = dir.listFiles();
            if (null != files) {
                for (File file : files) {
                    String fileName = file.getName();
                    // Extract the suffix (last ".xxx" part) of the file name.
                    String suffix = fileName.replaceAll(".*(\\..*)", "$1");
                    if (suffix.equals(OracleObjectType.PACKAGE_BODY.getSuffix())) {
                        packageBodyFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.FUNCTION.getSuffix())) {
                        functionFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.PROCEDURE.getSuffix())) {
                        procedureFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.SEQUENCE.getSuffix())) {
                        sequenceFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.PACKAGE.getSuffix())) {
                        packageFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.TABLE.getSuffix())) {
                        tableFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.TYPE_BODY.getSuffix())) {
                        typeBodyFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.TYPE.getSuffix())) {
                        typeFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.INDEX.getSuffix())) {
                        indexFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.TABLE_COMMENT.getSuffix())) {
                        tableCommentFileList.add(file);
                    }
                    if (suffix.equals(OracleObjectType.COLUMN_COMMENT.getSuffix())) {
                        columnCommentFileList.add(file);
                    }
                }
            }

            // Drop all TYPEs and TYPE BODYs (the _ARRAY ones first), then re-import.
            this.deleteTypeAndTypeBody();
            this.importType(typeFileList, excludeExceptionList, failFileList);
            this.importTypeBody(typeBodyFileList, excludeExceptionList, failFileList);

            // Drop and re-import all SEQUENCEs.
            this.deleteSequence(excludeExceptionList);
            this.importSequence(sequenceFileList, excludeExceptionList, failFileList);

            // Drop and re-import all TABLEs.
            this.deleteTable(excludeExceptionList);
            this.importTable(tableFileList, excludeExceptionList, failFileList);

            // Drop and re-import all column comments.
            this.deleteColumnComment(excludeExceptionList);
            this.importColumnComment(columnCommentFileList, excludeExceptionList, failFileList);

            // Drop and re-import all INDEXes.
            this.deleteIndex();
            this.importIndex(indexFileList, excludeExceptionList, failFileList);

            // Drop and re-import all FUNCTIONs.
            this.deleteFunction();
            this.importFunction(functionFileList, excludeExceptionList, failFileList);

            // Drop and re-import all PROCEDUREs.
            this.deleteProcedure();
            this.importProcedure(procedureFileList, excludeExceptionList, failFileList);

            // Drop all PACKAGEs, then import specifications before bodies.
            this.deletePackageAndPackageBody();
            this.importPackage(packageFileList, excludeExceptionList, failFileList);
            this.importPackageBody(packageBodyFileList, excludeExceptionList, failFileList);

            logger.warn("执行失败的文件：" + failFileList.toString());
        } else {
            logger.warn("目录[" + DATABASE_OBJECT_DEFINITION_SAVE_DIRECTORY + "]下面没有文件");
        }
    }

    /**
     * Drops every TYPE (and, with it, its TYPE BODY). Types whose names end
     * with {@code _ARRAY} are dropped first, before the element types they
     * depend on.
     */
    private void deleteTypeAndTypeBody() {
        try {
            List<String> typeList = projectDao.findObjectNameListByObjectType(OracleObjectType.TYPE.getObjectTypeName());

            if (null != typeList && typeList.size() > 0) {
                // De-duplicate while keeping the original order.
                Set<String> typeNames = new LinkedHashSet<>(typeList);

                // First pass: drop the _ARRAY collection types.
                for (String typeName : typeNames) {
                    if (typeName.endsWith("_ARRAY")) {
                        projectDao.deleteType(typeName);
                    }
                }

                // Second pass: drop the remaining types.
                for (String typeName : typeNames) {
                    if (!typeName.endsWith("_ARRAY")) {
                        projectDao.deleteType(typeName);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Imports every TYPE definition file by executing its contents as SQL.
     *
     * @param typeFileList         files containing TYPE definitions
     * @param excludeExceptionList unused here; kept for signature consistency
     *                             with the other import methods
     * @param failFileList         collects the files whose SQL failed
     */
    private void importType(List<File> typeFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null != typeFileList && typeFileList.size() > 0) {
            for (File file : typeFileList) {
                StringBuffer stringBuffer = new StringBuffer();
                // BUG FIX: close the Scanner (and its underlying FileReader);
                // the original leaked one file handle per imported file.
                try (Scanner scanner = new Scanner(new FileReader(file))) {
                    while (scanner.hasNextLine()) {
                        // Keep the line breaks: a TYPE containing "--" comments
                        // fails to compile when collapsed onto a single line.
                        stringBuffer.append(scanner.nextLine()).append("\n");
                    }
                    projectDao.executeSql(stringBuffer.toString());
                } catch (Exception e) {
                    e.printStackTrace();
                    failFileList.add(file);
                }
            }
        }
    }

    /**
     * Imports every TYPE BODY definition file by executing its contents as SQL.
     *
     * @param typeBodyFileList     files containing TYPE BODY definitions
     * @param excludeExceptionList unused here; kept for signature consistency
     *                             with the other import methods
     * @param failFileList         collects the files whose SQL failed
     */
    private void importTypeBody(List<File> typeBodyFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null != typeBodyFileList && typeBodyFileList.size() > 0) {
            for (File file : typeBodyFileList) {
                StringBuffer stringBuffer = new StringBuffer();
                // BUG FIX: close the Scanner (and its underlying FileReader);
                // the original leaked one file handle per imported file.
                try (Scanner scanner = new Scanner(new FileReader(file))) {
                    while (scanner.hasNextLine()) {
                        // NOTE(review): unlike importType(), lines are joined here
                        // WITHOUT a newline — preserved as-is, but a body containing
                        // "--" comments would likely fail to compile; confirm intent.
                        stringBuffer.append(scanner.nextLine());
                    }
                    projectDao.executeSql(stringBuffer.toString());
                } catch (Exception e) {
                    e.printStackTrace();
                    failFileList.add(file);
                }
            }
        }
    }

    /**
     * Drops every PACKAGE; Oracle drops the matching PACKAGE BODY together
     * with its specification.
     */
    private void deletePackageAndPackageBody() {
        try {
            List<String> packageList = projectDao.findObjectNameListByObjectType(OracleObjectType.PACKAGE.getObjectTypeName());

            if (null != packageList && packageList.size() > 0) {
                // De-duplicate while keeping the original order.
                for (String packageName : new LinkedHashSet<>(packageList)) {
                    projectDao.deletePackage(packageName);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Imports PACKAGE specifications by executing each file's source as one SQL statement.
     *
     * @param packageFileList      PACKAGE spec source files to execute
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importPackage(List<File> packageFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == packageFileList || packageFileList.isEmpty()) {
            return;
        }
        for (File file : packageFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    // keep the line breaks — sources containing comments fail without them
                    sql.append(scanner.nextLine()).append('\n');
                }
                projectDao.executeSql(sql.toString());
            } catch (Exception e) {
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Imports PACKAGE BODY definitions by executing each file's source as one SQL statement.
     *
     * @param packageBodyFileList  PACKAGE BODY source files to execute
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importPackageBody(List<File> packageBodyFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == packageBodyFileList || packageBodyFileList.isEmpty()) {
            return;
        }
        for (File file : packageBodyFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    // keep the line breaks — sources containing comments fail without them
                    sql.append(scanner.nextLine()).append('\n');
                }
                projectDao.executeSql(sql.toString());
            } catch (Exception e) {
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Drops every SEQUENCE owned by the current schema.
     * The original copied the list into a CopyOnWriteArraySet and removed each
     * element after deletion, but the set was never read afterwards — dead
     * bookkeeping, removed here.
     *
     * @param excludeExceptionList index 2 is assumed to hold the ORA-32794 message
     *                             ("cannot drop a system-generated sequence") — TODO confirm with caller
     */
    private void deleteSequence(List<String> excludeExceptionList) {
        List<String> sequenceList = projectDao.findObjectNameListByObjectType(OracleObjectType.SEQUENCE.getObjectTypeName());

        if (null == sequenceList || sequenceList.isEmpty()) {
            return;
        }
        for (String sequence : sequenceList) {
            try {
                projectDao.deleteSequence(sequence);
            } catch (Exception e) {
                // ORA-32794: system-generated sequences in use by a table/trigger
                // cannot be dropped until the owning object is dropped — skip them
                if (excludeExceptionList.get(2).equals(e.getMessage())) {
                    logger.warn("无法删除的序列名：" + sequence);
                    continue;
                }
                e.printStackTrace();
            }
        }
    }

    /**
     * Imports SEQUENCE definitions by executing each file's source as one SQL statement.
     * The statement is joined onto a single space-separated line and the trailing
     * semicolon is removed (JDBC rejects the terminator). The original removed
     * EVERY semicolon in the file, not just the trailing one — fixed here.
     *
     * @param sequenceFileList     SEQUENCE source files to execute
     * @param excludeExceptionList index 0 is assumed to hold an "already exists"-style
     *                             message to ignore — TODO confirm with caller
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importSequence(List<File> sequenceFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == sequenceFileList || sequenceFileList.isEmpty()) {
            return;
        }
        for (File file : sequenceFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    // join onto one line, space-separated, or the statement fails
                    sql.append(scanner.nextLine()).append(' ');
                }
                // drop only the trailing statement terminator
                String statement = sql.toString().trim();
                if (statement.endsWith(";")) {
                    statement = statement.substring(0, statement.length() - 1);
                }
                projectDao.executeSql(statement);
            } catch (Exception e) {
                if (excludeExceptionList.get(0).equals(e.getMessage())) {
                    continue;
                }
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Drops every TABLE owned by the current schema. Because foreign keys make
     * drop order matter, the remaining tables are retried in shuffled order,
     * pass after pass, until all are gone. The original looped forever when no
     * table in a pass could be dropped; this version bails out when a full pass
     * makes no progress.
     *
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     */
    private void deleteTable(List<String> excludeExceptionList) {
        List<String> tableList = projectDao.findObjectNameListByObjectType(OracleObjectType.TABLE.getObjectTypeName());

        if (null == tableList || tableList.isEmpty()) {
            return;
        }
        while (!tableList.isEmpty()) {
            // random order so a bad FK ordering is eventually broken
            Collections.shuffle(tableList);

            List<String> remaining = new ArrayList<>();
            for (String table : tableList) {
                try {
                    projectDao.deleteTable(table);
                } catch (Exception e) {
                    e.printStackTrace();
                    remaining.add(table);
                }
            }

            if (remaining.size() == tableList.size()) {
                // a whole pass deleted nothing — stop instead of looping forever
                logger.warn("无法删除的表：" + remaining);
                break;
            }
            tableList = remaining;
        }
    }

    /**
     * Imports TABLE definitions by executing each file's DDL as one SQL statement.
     *
     * @param tableFileList        TABLE DDL files to execute
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importTable(List<File> tableFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == tableFileList || tableFileList.isEmpty()) {
            return;
        }
        for (File file : tableFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    // NOTE(review): lines are concatenated with no separator, as in the
                    // original — assumes each source line already ends where a token ends
                    sql.append(scanner.nextLine());
                }
                projectDao.executeSql(sql.toString());
            } catch (Exception e) {
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Clears every non-empty column comment by issuing
     * {@code comment on column TABLE.COLUMN is ''} for each one.
     *
     * @param excludeExceptionList index 3 is assumed to hold the "invalid table
     *                             name" message, which is skipped with a warning — TODO confirm with caller
     */
    private void deleteColumnComment(List<String> excludeExceptionList) {
        List<UserColumnComment> userColumnCommentList = userColumnCommentDao.findByCommentNotNull();
        if (null == userColumnCommentList || userColumnCommentList.isEmpty()) {
            return;
        }
        for (UserColumnComment userColumnComment : userColumnCommentList) {
            String tableName = userColumnComment.getKey().getTableName();
            String columnName = userColumnComment.getKey().getColumnName();
            String sql = "comment on column " + tableName + "." + columnName + " is '';";
            try {
                projectDao.executeSql(sql);
            } catch (Exception e) {
                // tables that no longer exist are logged and skipped
                if (excludeExceptionList.get(3).equals(e.getMessage())) {
                    logger.warn("表名无效的表名：" + tableName);
                    continue;
                }
                e.printStackTrace();
            }
        }
    }

    /**
     * Imports column COMMENT statements. A comment may span several lines, so
     * input lines are accumulated and executed only once the statement looks
     * complete (ends with the closing quote after the trailing ';' is removed).
     *
     * Fixes over the original: a missing file no longer NPEs on a null Scanner
     * (it is recorded as a failure instead); the identity no-op
     * {@code replace("'", "\'")} was removed; an empty accumulated line no
     * longer throws StringIndexOutOfBoundsException.
     *
     * @param columnCommentFileList COMMENT script files to execute
     * @param excludeExceptionList  exception messages to ignore (currently unused here)
     * @param failFileList          collects files that failed
     */
    private void importColumnComment(List<File> columnCommentFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == columnCommentFileList || columnCommentFileList.isEmpty()) {
            return;
        }
        for (File file : columnCommentFileList) {
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                StringBuilder statement = new StringBuilder();
                while (scanner.hasNextLine()) {
                    statement.append(scanner.nextLine());
                    String sql = statement.toString();
                    if (sql.isEmpty()) {
                        continue;
                    }
                    // drop the trailing character (the statement terminator ';')
                    sql = sql.substring(0, sql.length() - 1);
                    try {
                        // some comments span multiple lines: only execute a complete statement
                        if (sql.endsWith("'")) {
                            projectDao.executeSql(sql);
                            statement = new StringBuilder();
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                        failFileList.add(file);
                    }
                }
            } catch (Exception e) {
                // unreadable/missing file: record as a failure instead of crashing
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Drops every INDEX owned by the current schema, except system-generated
     * LOB indexes (names starting with "SYS_IL"), which Oracle manages itself.
     * The original warning said "不导出" (not exported) — wrong verb for a
     * delete routine; corrected to say the index is skipped for deletion.
     */
    private void deleteIndex() {
        List<UserIndexes> userIndexesList = userIndexesDao.findAll();
        if (null == userIndexesList || userIndexesList.isEmpty()) {
            return;
        }
        for (UserIndexes userIndexes : userIndexesList) {
            try {
                // system-generated LOB indexes cannot / should not be dropped directly
                if (userIndexes.getIndexName().startsWith("SYS_IL")) {
                    logger.warn("索引[" + userIndexes.getIndexName() + "]不删除");
                    continue;
                }
                userIndexesDao.delete(userIndexes.getIndexName());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Imports INDEX definitions by executing each file's DDL as one SQL statement.
     *
     * @param indexFileList        INDEX DDL files to execute
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importIndex(List<File> indexFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == indexFileList || indexFileList.isEmpty()) {
            return;
        }
        for (File file : indexFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    sql.append(scanner.nextLine());
                }
                projectDao.executeSql(sql.toString());
            } catch (Exception e) {
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Drops every FUNCTION owned by the current schema. Failures are logged
     * and do not stop the remaining deletions.
     */
    private void deleteFunction() {
        List<String> functionNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.FUNCTION.getObjectTypeName());
        if (null == functionNameList || functionNameList.isEmpty()) {
            return;
        }
        for (String functionName : functionNameList) {
            try {
                projectDao.deleteFunction(functionName);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Imports FUNCTION definitions by executing each file's source as one SQL statement.
     *
     * @param functionFileList     FUNCTION source files to execute
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importFunction(List<File> functionFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == functionFileList || functionFileList.isEmpty()) {
            return;
        }
        for (File file : functionFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    // keep the line breaks — sources containing comments fail without them
                    sql.append(scanner.nextLine()).append('\n');
                }
                projectDao.executeSql(sql.toString());
            } catch (Exception e) {
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

    /**
     * Drops every PROCEDURE owned by the current schema. Failures are logged
     * and do not stop the remaining deletions.
     */
    private void deleteProcedure() {
        List<String> procedureNameList = projectDao.findObjectNameListByObjectType(OracleObjectType.PROCEDURE.getObjectTypeName());
        if (null == procedureNameList || procedureNameList.isEmpty()) {
            return;
        }
        for (String procedureName : procedureNameList) {
            try {
                projectDao.deleteProcedure(procedureName);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Imports PROCEDURE definitions by executing each file's source as one SQL statement.
     *
     * @param procedureFileList    PROCEDURE source files to execute
     * @param excludeExceptionList exception messages to ignore (currently unused here)
     * @param failFileList         collects files whose SQL failed to execute
     */
    private void importProcedure(List<File> procedureFileList, List<String> excludeExceptionList, List<File> failFileList) {
        if (null == procedureFileList || procedureFileList.isEmpty()) {
            return;
        }
        for (File file : procedureFileList) {
            StringBuilder sql = new StringBuilder();
            // try-with-resources: the original never closed the Scanner (resource leak)
            try (Scanner scanner = new Scanner(new FileReader(file))) {
                while (scanner.hasNextLine()) {
                    // keep the line breaks — sources containing comments fail without them
                    sql.append(scanner.nextLine()).append('\n');
                }
                projectDao.executeSql(sql.toString());
            } catch (Exception e) {
                e.printStackTrace();
                failFileList.add(file);
            }
        }
    }

/*********************************************************************************************************************
 *
 * 									            使操作系统进入睡眠状态
 *
 *********************************************************************************************************************/
    /**
     * Puts the operating system into suspend/sleep state.
     * Windows-only: shells out to rundll32 with powrprof.dll's SetSuspendState.
     */
    @Override
    public void makeOperationSystemSleep() {
        logger.info("开始使操作系统进入睡眠状态");

        final String command = "rundll32.exe powrprof.dll,SetSuspendState";
        try {
            Runtime.getRuntime().exec(command);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

        logger.info("使操作系统进入睡眠状态完成");
    }
/*********************************************************************************************************************
 *
 * 									                          备份数据库/表 （海量和增量）
 *
 *********************************************************************************************************************/
    /**
     * Bulk-backs up the database a few tables at a time by shelling out to
     * Oracle's {@code exp} tool, one .dmp file per configured table, then runs
     * a separate pre-built command for oversized tables.
     *
     * Fix over the original: InterruptedException is no longer swallowed —
     * the interrupt flag is restored before logging.
     */
    public void backupDatabaseMass() {
        logger.info("海量备份数据库，每次只备份一部分，将其导出为.dmp文件");

        String[] tableNameArray = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.tables")
                .replace("[", "").replace("]", "").split(",");
        String oversizeTables = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.tables.oversizeTables")
                .replace("[", "").replace("]", "").trim();

        // normal-sized dmp files: one exp invocation per table
        for (String rawName : tableNameArray) {
            String tableName = rawName.trim();
            if (!StringUtils.isNotEmpty(tableName)) {
                continue;
            }
            // SECURITY(review): credentials are hard-coded on the command line;
            // they should come from configuration
            String CMD = "exp scott/tiger@ADAM file=" + DUMP_BACKUP_DIR + tableName
                    + ".dmp log=" + tableName + ".log tables="
                    + tableName;
            try {
                Runtime.getRuntime().exec("cmd /c start " + CMD);
                // NOTE(review): exec is asynchronous — at this point the export
                // has only been started, hence the 30s pacing sleep below
                logger.info("海量备份数据库完成。备份命令：" + CMD);
                Thread.sleep(30000);
            } catch (IOException e) {
                e.printStackTrace();
            } catch (InterruptedException e) {
                // restore the interrupt flag instead of swallowing it
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
        }

        // oversized dmp files: the property already contains the full command
        try {
            Runtime.getRuntime().exec("cmd /c start " + oversizeTables);
            logger.info("海量备份数据库完成。备份命令：" + oversizeTables);
            Thread.sleep(30000);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Incrementally backs up each table listed in the backup.properties
     * configuration, dispatching to the table-specific backup method. Day-,
     * week- and month-granularity tables each use their own date window.
     * Note: each table has its own backup/restore strategy — read the javadoc
     * of the individual backup methods for details.
     *
     * Fix over the original: the STOCK_TRANSACTION_DATA comparison used
     * {@code .toString()} where every other branch uses {@code .trim()};
     * made consistent.
     *
     * @param multithreading whether the STOCK_TRANSACTION_DATA date query may use multiple threads
     */
    public void backupDatabaseIncrementalByJson(boolean multithreading) {
        logger.info("开始使用json增量备份数据库");

        String tableNames = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.table_name");
        String dayBeginDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.day.begin_date");
        String dayEndDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.day.end_date");
        String weekBeginDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.week.begin_date");
        String weekEndDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.week.end_date");
        String monthBeginDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.month.begin_date");
        String monthEndDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "backup.database.month.end_date");

        // property value looks like "[A, B, C]" — strip brackets/spaces and split
        String[] tableNameArray = tableNames.replace("[", "").replace("]", "").replace(" ", "").split(",");
        for (String tableName : tableNameArray) {
            // day-granularity tables
            if (TableName.STOCK_TRANSACTION_DATA.getName().trim().equals(tableName)) {
                this.backupStockTransactionDataIncrementalByJson(multithreading, dayBeginDate, dayEndDate);
            }
            if (TableName.STOCK_INDEX.getName().trim().equals(tableName)) {
                this.backupStockIndexIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.BOARD_INDEX.getName().trim().equals(tableName)) {
                this.backupBoardIndexIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.REPORT.getName().trim().equals(tableName)) {
                this.backupReportIncrementalByJson(dayBeginDate, dayEndDate);
            }
            // week-granularity tables
            if (TableName.STOCK_WEEK.getName().trim().equals(tableName)) {
                this.backupStockWeekIncrementalByJson(weekBeginDate, weekEndDate);
            }
            if (TableName.STOCK_INDEX_WEEK.getName().trim().equals(tableName)) {
                this.backupStockIndexWeekIncrementalByJson(weekBeginDate, weekEndDate);
            }

            // model result tables (day granularity)
            if (TableName.MDL_ALL_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelAllGoldCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_CLOSE_PRICE_MA5_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelClosePriceMA5GoldCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_CLOSE_PRICE_MA5_DEAD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelClosePriceMA5DeadCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_MACD_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelMACDGoldCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_MACD_DEAD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelMACDDeadCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_HEI_KIN_ASHI_UP_DOWN.getName().trim().equals(tableName)) {
                this.backupModelHeiKinAshiUpDownIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_HEI_KIN_ASHI_DOWN_UP.getName().trim().equals(tableName)) {
                this.backupModelHeiKinAshiDownUpIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_KD_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelKDGoldCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_KD_DEAD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelKDDeadCrossIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_TOP_STOCK.getName().trim().equals(tableName)) {
                this.backupModelTopStockIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_TOP_STOCK_DETAIL.getName().trim().equals(tableName)) {
                this.backupModelTopStockDetailIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.MDL_STOCK_ANALYSIS.getName().trim().equals(tableName)) {
                this.backupModelStockAnalysisIncrementalByJson(dayBeginDate, dayEndDate);
            }
            // month-granularity tables
            if (TableName.MDL_STOCK_MONTH_ANALYSIS.getName().trim().equals(tableName)) {
                this.backupModelStockMonthAnalysisIncrementalByJson(monthBeginDate, monthEndDate);
            }
            if (TableName.STOCK_MONTH.getName().trim().equals(tableName)) {
                this.backupStockMonthIncrementalByJson(monthBeginDate, monthEndDate);
            }

            if (TableName.MDL_WEEK_KD_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.backupModelWeekKDGoldCrossIncrementalByJson(weekBeginDate, weekEndDate);
            }

            // tables backed up in full each time (no date window)
            if (TableName.STOCK_INFO.getName().trim().equals(tableName)) {
                this.backupStockInfoIncrementalByJson();
            }
            if (TableName.BOARD.getName().trim().equals(tableName)) {
                this.backupBoardIncrementalByJson();
            }
            if (TableName.MODEL.getName().trim().equals(tableName)) {
                this.backupModelIncrementalByJson();
            }
            if (TableName.FOREIGN_EXCHANGE.getName().trim().equals(tableName)) {
                this.backupForeignExchangeIncrementalByJson();
            }
            if (TableName.FOREIGN_EXCHANGE_RECORD.getName().trim().equals(tableName)) {
                this.backupForeignExchangeRecord(dayBeginDate, dayEndDate);
            }

            if (TableName.C_F_DATE_CONTRACT_DATA.getName().trim().equals(tableName)) {
                this.backupCommodityFutureDateContractDataRecord(dayBeginDate, dayEndDate);
            }
        }

        logger.info("使用json增量备份数据库结束");
    }

    /**
     * Incrementally backs up STOCK_TRANSACTION_DATA: for every trading day in
     * [beginDate, endDate], writes that day's rows as newline-delimited JSON,
     * zips the file, and deletes the intermediate .json.
     *
     * Fixes over the original: the FileWriter is opened in try-with-resources
     * (it could be null and was never closed on failure paths), the ObjectMapper
     * is created once instead of per day, and an empty day no longer throws on
     * {@code stockList.get(0)}.
     *
     * @param multithreading whether the date query may use multiple threads
     * @param beginDate      start of the backup window (inclusive)
     * @param endDate        end of the backup window (inclusive)
     */
    public void backupStockTransactionDataIncrementalByJson(boolean multithreading, String beginDate, String endDate) {
        logger.info("开始增量备份表STOCK_TRANSACTION_DATA，时间为【" + beginDate + "】至【" + endDate + "】");

        // trading dates present in STOCK_TRANSACTION_DATA within the window
        List<String> dateList = stockTransactionDataDao.getDateByCondition(multithreading, beginDate, endDate, false);
        // ObjectMapper is reusable; constructing it per day is wasted work
        ObjectMapper objectMapper = new ObjectMapper();
        for (String date : dateList) {
            // all transaction rows for this date
            List<StockTransactionData> stockList = stockTransactionDataDao.getStocksByDate(date);
            if (null == stockList || stockList.isEmpty()) {
                continue;
            }
            // target json path: <backup dir>\STOCK_TRANSACTION_DATA\STOCK_TRANSACTION_DATA-<date>.json
            String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_TRANSACTION_DATA + "\\" + TableName.STOCK_TRANSACTION_DATA + "-" + DateUtil.dateToString(stockList.get(0).getDate()) + ".json";
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // one JSON object per line
                for (StockTransactionData stock : stockList) {
                    fileWriter.append(objectMapper.writeValueAsString(stock) + "\n");
                }
            } catch (IOException e) {
                // Jackson's generation/mapping exceptions are IOExceptions too
                e.printStackTrace();
            }

            // compress the json into a zip, then remove the json
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);
            FileUtil.deleteFile(path);
        }
    }

    /**
     * Incrementally backs up STOCK_WEEK, grouped by END_DATE: records sharing an
     * END_DATE become one zipped JSON file. Note that BEGIN_DATE is not
     * necessarily Monday nor END_DATE Friday, so several zip files together may
     * make up one calendar week.
     *
     * Fixes over the original: FileWriter opened in try-with-resources (it could
     * be null and was never closed on failure paths), ObjectMapper created once,
     * and an empty group no longer throws on {@code get(0)}.
     *
     * @param beginDate start of the backup window (inclusive)
     * @param endDate   end of the backup window (inclusive)
     */
    public void backupStockWeekIncrementalByJson(String beginDate, String endDate) {
        logger.info("开始增量备份表STOCK_WEEK，时间为【" + beginDate + "】至【" + endDate + "】");

        // END_DATE values present in STOCK_WEEK within the window
        List<Date> dateList = stockWeekDao.getDateByCondition(beginDate, endDate);
        ObjectMapper objectMapper = new ObjectMapper();
        for (Date date : dateList) {
            // all weekly rows whose END_DATE matches this date
            List<StockWeek> stockWeekList = stockWeekDao.getStockWeeksByDate(DateUtil.dateToString(date));
            if (null == stockWeekList || stockWeekList.isEmpty()) {
                continue;
            }
            // target json path: <backup dir>\STOCK_WEEK\STOCK_WEEK-<end date>.json
            String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_WEEK + "\\" + TableName.STOCK_WEEK + "-" + DateUtil.dateToString(stockWeekList.get(0).getEndDate()) + ".json";
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // one JSON object per line
                for (StockWeek stockWeek : stockWeekList) {
                    fileWriter.append(objectMapper.writeValueAsString(stockWeek) + "\n");
                }
            } catch (IOException e) {
                // Jackson's generation/mapping exceptions are IOExceptions too
                e.printStackTrace();
            }

            // compress the json into a zip, then remove the json
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);
            FileUtil.deleteFile(path);
        }
    }

    /**
     * Incrementally backs up STOCK_INDEX: one zipped JSON file per trading day
     * in [beginDate, endDate].
     *
     * Fixes over the original: the finally block called {@code fileWriter.close()}
     * which threw NPE when the writer failed to open; replaced with
     * try-with-resources. ObjectMapper is created once, and an empty day no
     * longer throws on {@code get(0)}.
     *
     * @param beginDate start of the backup window (inclusive)
     * @param endDate   end of the backup window (inclusive)
     */
    public void backupStockIndexIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表STOCK_INDEX，时间为【" + beginDate + "】至【" + endDate + "】");

        // dates present in STOCK_INDEX within the window
        List<Date> dateList = stockIndexDao.getDateByCondition(beginDate, endDate);
        ObjectMapper objectMapper = new ObjectMapper();
        for (Date date : dateList) {
            // all index rows for this date
            List<StockIndex> stockIndexList = stockIndexDao.getStockIndexByDate(DateUtil.dateToString(date));
            if (null == stockIndexList || stockIndexList.isEmpty()) {
                continue;
            }
            // target json path: <backup dir>\STOCK_INDEX\STOCK_INDEX-<date>.json
            String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_INDEX + "\\" + TableName.STOCK_INDEX + "-" + DateUtil.dateToString(stockIndexList.get(0).getDate()) + ".json";
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // one JSON object per line
                for (StockIndex stockIndex : stockIndexList) {
                    fileWriter.append(objectMapper.writeValueAsString(stockIndex) + "\n");
                }
            } catch (IOException e) {
                // Jackson's generation/mapping exceptions are IOExceptions too
                e.printStackTrace();
            }

            // compress the json into a zip, then remove the json
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }

    }

    /**
     * Incrementally backs up STOCK_INDEX_WEEK: all records in
     * [beginDate, endDate] go into a single zipped JSON file.
     *
     * Fix over the original: the finally block called {@code fileWriter.close()}
     * which threw NPE when the writer failed to open; replaced with
     * try-with-resources.
     *
     * @param beginDate start of the backup window (inclusive)
     * @param endDate   end of the backup window (inclusive)
     */
    public void backupStockIndexWeekIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表STOCK_INDEX_WEEK，时间为【" + beginDate + "】至【" + endDate + "】");

        // every STOCK_INDEX_WEEK row inside the window
        List<StockIndexWeek> stockIndexWeekList = stockIndexWeekDao.getStockIndexWeekWithinDate(beginDate, endDate);
        // target json path: <backup dir>\STOCK_INDEX_WEEK\STOCK_INDEX_WEEK-<begin>-<end>.json
        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_INDEX_WEEK + "\\" + TableName.STOCK_INDEX_WEEK + "-" + beginDate + "-" + endDate + ".json";
        ObjectMapper objectMapper = new ObjectMapper();
        try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
            // one JSON object per line
            for (StockIndexWeek stockIndexWeek : stockIndexWeekList) {
                fileWriter.append(objectMapper.writeValueAsString(stockIndexWeek) + "\n");
            }
        } catch (IOException e) {
            // Jackson's generation/mapping exceptions are IOExceptions too
            e.printStackTrace();
        }

        // compress the json into a zip, then remove the json
        ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
        zipCompressor.compress(path);

        FileUtil.deleteFile(path);
        logger.info("删除文件【" + path + "】");
    }

    /**
     * Incrementally backs up table BOARD_INDEX: for each trading date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupBoardIndexIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表BOARD_INDEX，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct trading dates in BOARD_INDEX between beginDate and endDate
        List<String> dateList = boardIndexDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All BOARD_INDEX rows recorded on that trading day
            List<BoardIndex> boardIndexList = boardIndexDao.getBoardIndexsByDate(dateList.get(i));
            // Target JSON file, named after the table and the record date
            String path = JSON_DATA_BACKUP_DIR + TableName.BOARD_INDEX + "\\" + TableName.BOARD_INDEX + "-" + DateUtil.dateToString(boardIndexList.get(0).getDate()) + ".json";
            // try-with-resources guarantees the writer is closed and avoids the
            // NullPointerException the old finally block risked when the
            // FileWriter constructor itself threw.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (BoardIndex boardIndex : boardIndexList) {
                    fileWriter.append(objectMapper.writeValueAsString(boardIndex)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table REPORT: for each report date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupReportIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表REPORT，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct report dates in REPORT between beginDate and endDate
        List<Date> dateList = reportDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All REPORT rows recorded on that day
            List<Report> reportList = reportDao.getReportByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the report date
            String path = JSON_DATA_BACKUP_DIR + TableName.REPORT + "\\" + TableName.REPORT + "-" + DateUtil.dateToString(reportList.get(0).getReportDate()) + ".json";
            // try-with-resources guarantees the writer is closed and avoids the
            // NullPointerException the old finally block risked when the
            // FileWriter constructor itself threw.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (Report report : reportList) {
                    fileWriter.append(objectMapper.writeValueAsString(report)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_MACD_GOLD_CROSS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelMACDGoldCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_MACD_GOLD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_MACD_GOLD_CROSS between beginDate and endDate
        List<Date> dateList = modelMACDGoldCrossDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_MACD_GOLD_CROSS rows recorded on that day
            List<ModelMACDGoldCross> modelMACDGoldCrossList = modelMACDGoldCrossDao.getModelMACDGoldCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the sell date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_MACD_GOLD_CROSS + "\\" + TableName.MDL_MACD_GOLD_CROSS + "-" + DateUtil.dateToString(modelMACDGoldCrossList.get(0).getSellDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelMACDGoldCross record : modelMACDGoldCrossList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_MACD_DEAD_CROSS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelMACDDeadCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_MACD_DEAD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_MACD_DEAD_CROSS between beginDate and endDate
        List<Date> dateList = modelMACDDeadCrossDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_MACD_DEAD_CROSS rows recorded on that day
            List<ModelMACDDeadCross> modelMACDDeadCrossList = modelMACDDeadCrossDao.getModelMACDDeadCrossByDate((DateUtil.dateToString(dateList.get(i))));
            // Target JSON file, named after the table and the sell date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_MACD_DEAD_CROSS + "\\" + TableName.MDL_MACD_DEAD_CROSS + "-" + DateUtil.dateToString(modelMACDDeadCrossList.get(0).getSellDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelMACDDeadCross record : modelMACDDeadCrossList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_HEI_KIN_ASHI_UP_DOWN: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelHeiKinAshiUpDownIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_HEI_KIN_ASHI_UP_DOWN，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_HEI_KIN_ASHI_UP_DOWN between beginDate and endDate
        List<Date> dateList = modelHeiKinAshiUpDownDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_HEI_KIN_ASHI_UP_DOWN rows recorded on that day
            List<ModelHeiKinAshiUpDown> modelHeiKinAshiUpDownList = modelHeiKinAshiUpDownDao.getModelHeiKinAshiUpDownByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the sell date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_HEI_KIN_ASHI_UP_DOWN + "\\" + TableName.MDL_HEI_KIN_ASHI_UP_DOWN + "-" + DateUtil.dateToString(modelHeiKinAshiUpDownList.get(0).getSellDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelHeiKinAshiUpDown record : modelHeiKinAshiUpDownList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_HEI_KIN_ASHI_DOWN_UP: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelHeiKinAshiDownUpIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_HEI_KIN_ASHI_DOWN_UP，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_HEI_KIN_ASHI_DOWN_UP between beginDate and endDate
        List<Date> dateList = modelHeiKinAshiDownUpDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_HEI_KIN_ASHI_DOWN_UP rows recorded on that day
            List<ModelHeiKinAshiDownUp> modelHeiKinAshiDownUpList = modelHeiKinAshiDownUpDao.getModelHeiKinAshiDownUpByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the sell date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_HEI_KIN_ASHI_DOWN_UP + "\\" + TableName.MDL_HEI_KIN_ASHI_DOWN_UP + "-" + DateUtil.dateToString(modelHeiKinAshiDownUpList.get(0).getSellDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelHeiKinAshiDownUp record : modelHeiKinAshiDownUpList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_TOP_STOCK: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelTopStockIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_TOP_STOCK，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_TOP_STOCK between beginDate and endDate
        List<Date> dateList = modelTopStockDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_TOP_STOCK rows recorded on that day
            List<ModelTopStock> modelTopStockList = modelTopStockDao.getModelTopStockByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the record date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_TOP_STOCK + "\\" + TableName.MDL_TOP_STOCK + "-" + DateUtil.dateToString(modelTopStockList.get(0).getDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelTopStock record : modelTopStockList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_TOP_STOCK_DETAIL: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelTopStockDetailIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_TOP_STOCK_DETAIL，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_TOP_STOCK_DETAIL between beginDate and endDate
        List<Date> dateList = modelTopStockDetailDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_TOP_STOCK_DETAIL rows recorded on that day
            List<ModelTopStockDetail> modelTopStockDetailList = modelTopStockDetailDao.getModelTopStockDetailByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the record date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_TOP_STOCK_DETAIL + "\\" + TableName.MDL_TOP_STOCK_DETAIL + "-" + DateUtil.dateToString(modelTopStockDetailList.get(0).getDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelTopStockDetail record : modelTopStockDetailList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_STOCK_ANALYSIS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelStockAnalysisIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_STOCK_ANALYSIS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_STOCK_ANALYSIS between beginDate and endDate
        // (the old comment wrongly said MDL_TOP_STOCK)
        List<Date> dateList = modelStockAnalysisDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_STOCK_ANALYSIS rows recorded on that day
            List<ModelStockAnalysis> modelStockAnalysisList = modelStockAnalysisDao.getModelStockAnalysisByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the record date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_STOCK_ANALYSIS + "\\" + TableName.MDL_STOCK_ANALYSIS + "-" + DateUtil.dateToString(modelStockAnalysisList.get(0).getDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelStockAnalysis record : modelStockAnalysisList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_STOCK_MONTH_ANALYSIS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelStockMonthAnalysisIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_STOCK_MONTH_ANALYSIS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_STOCK_MONTH_ANALYSIS between beginDate and endDate
        List<Date> dateList = modelStockMonthAnalysisDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_STOCK_MONTH_ANALYSIS rows recorded on that day
            List<ModelStockMonthAnalysis> modelStockMonthAnalysisList = modelStockMonthAnalysisDao.getModelStockMonthAnalysisByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the period end date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_STOCK_MONTH_ANALYSIS + "\\" + TableName.MDL_STOCK_MONTH_ANALYSIS + "-" + DateUtil.dateToString(modelStockMonthAnalysisList.get(0).getEndDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelStockMonthAnalysis record : modelStockMonthAnalysisList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table STOCK_MONTH: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupStockMonthIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表STOCK_MONTH，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in STOCK_MONTH between beginDate and endDate
        List<Date> dateList = stockMonthDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All STOCK_MONTH rows recorded on that day
            List<StockMonth> stockMonthList = stockMonthDao.getStockMonthByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the period end date
            String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_MONTH + "\\" + TableName.STOCK_MONTH + "-" + DateUtil.dateToString(stockMonthList.get(0).getEndDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (StockMonth record : stockMonthList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_WEEK_KD_GOLD_CROSS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelWeekKDGoldCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_WEEK_KD_GOLD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_WEEK_KD_GOLD_CROSS between beginDate and endDate
        List<Date> dateList = modelWeekKDGoldCrossDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_WEEK_KD_GOLD_CROSS rows recorded on that day
            List<ModelWeekKDGoldCross> modelWeekKDGoldCrossList = modelWeekKDGoldCrossDao.getModelWeekKDGoldCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the sell end date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_WEEK_KD_GOLD_CROSS + "\\" + TableName.MDL_WEEK_KD_GOLD_CROSS + "-" + DateUtil.dateToString(modelWeekKDGoldCrossList.get(0).getSellEndDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelWeekKDGoldCross record : modelWeekKDGoldCrossList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_KD_GOLD_CROSS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelKDGoldCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_KD_GOLD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_KD_GOLD_CROSS between beginDate and endDate
        List<Date> dateList = modelKDGoldCrossDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_KD_GOLD_CROSS rows recorded on that day
            List<ModelKDGoldCross> modelKDGoldCrossList = modelKDGoldCrossDao.getModelKDGoldCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the sell date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_KD_GOLD_CROSS + "\\" + TableName.MDL_KD_GOLD_CROSS + "-" + DateUtil.dateToString(modelKDGoldCrossList.get(0).getSellDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelKDGoldCross record : modelKDGoldCrossList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_KD_DEAD_CROSS: for each date in
     * [beginDate, endDate], that day's records are written to a JSON file
     * (one JSON document per line), the file is zipped, and the plain JSON
     * file is then deleted.
     *
     * @param beginDate start of the date range (inclusive)
     * @param endDate   end of the date range (inclusive)
     */
    public void backupModelKDDeadCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_KD_DEAD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // All distinct dates in MDL_KD_DEAD_CROSS between beginDate and endDate
        List<Date> dateList = modelKDDeadCrossDao.getDateByCondition(beginDate, endDate);
        // Reuse one mapper for all files instead of building one per iteration
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_KD_DEAD_CROSS rows recorded on that day
            List<ModelKDDeadCross> modelKDDeadCrossList = modelKDDeadCrossDao.getModelKDDeadCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Target JSON file, named after the table and the sell date
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_KD_DEAD_CROSS + "\\" + TableName.MDL_KD_DEAD_CROSS + "-" + DateUtil.dateToString(modelKDDeadCrossList.get(0).getSellDate()) + ".json";
            // Open the writer ONCE per file (the old code re-opened and closed it
            // for every single record) and let try-with-resources close it even on
            // failure, avoiding the NullPointerException the old finally risked.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                // One JSON document per line
                for (ModelKDDeadCross record : modelKDDeadCrossList) {
                    fileWriter.append(objectMapper.writeValueAsString(record)).append("\n");
                }
            } catch (IOException e) {
                // JsonGenerationException/JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive next to it
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Remove the uncompressed JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table FOREIGN_EXCHANGE_RECORD to JSON files (one
     * file per day found in the period), compresses each JSON file into a zip
     * archive and deletes the intermediate JSON file.
     *
     * @param beginDate start of the backup period (inclusive)
     * @param endDate   end of the backup period (inclusive)
     */
    public void backupForeignExchangeRecord(String beginDate, String endDate) {
        logger.info("增量备份表FOREIGN_EXCHANGE_RECORD，时间为【" + beginDate + "】至【" + endDate + "】");

        // Timestamps present in FOREIGN_EXCHANGE_RECORD between beginDate and endDate
        List<ForeignExchangeRecord> foreignExchangeRecordTimestampList = foreignExchangeRecordDao.getDateByCondition(beginDate, endDate);
        // Each entry only carries the dateTime field; truncating to day produces
        // duplicates, so de-duplicate and return as List<Date>
        List<Date> dateTimeList = DateUtil.timestampToDateWithoutDuplicate(foreignExchangeRecordTimestampList);
        // Build the (expensive) mapper once instead of once per date
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateTimeList.size(); i++) {
            // All FOREIGN_EXCHANGE_RECORD records of that day
            List<ForeignExchangeRecord> foreignExchangeRecordList = foreignExchangeRecordDao.getForeignExchangeRecordByDate(DateUtil.dateToString(dateTimeList.get(i)));
            if (foreignExchangeRecordList.isEmpty()) {
                continue;
            }
            // File name part: replace characters that are illegal in file names
            String fileNamePart = DateUtil.dateToString(foreignExchangeRecordList.get(0).getDateTime()).replace(":", "-").replace(".", "-");
            String path = JSON_DATA_BACKUP_DIR + TableName.FOREIGN_EXCHANGE_RECORD + "\\" + TableName.FOREIGN_EXCHANGE_RECORD + "-" + fileNamePart + ".json";
            // try-with-resources: guarantees the writer is closed and fixes the
            // NPE the original hit in the append loop when the FileWriter
            // constructor threw (fileWriter stayed null). The codehaus Jackson
            // exceptions extend IOException, so one catch covers all.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                for (int j = 0; j < foreignExchangeRecordList.size(); j++) {
                    fileWriter.append(objectMapper.writeValueAsString(foreignExchangeRecordList.get(j)) + "\n");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);
            // Delete the intermediate JSON file
            FileUtil.deleteFile(path);
        }

        logger.info("backup table FOREIGN_EXCHANGE_RECORD incremental by json finish");
    }

    /**
     * Incrementally backs up table C_F_DATE_CONTRACT_DATA to JSON files (one
     * file per transaction date found in the period), compresses each JSON file
     * into a zip archive and deletes the intermediate JSON file.
     *
     * @param beginDate start of the backup period (inclusive)
     * @param endDate   end of the backup period (inclusive)
     */
    public void backupCommodityFutureDateContractDataRecord(String beginDate, String endDate) {
        logger.info("增量备份表C_F_DATE_CONTRACT_DATA，时间为【" + beginDate + "】至【" + endDate + "】");

        // Transaction timestamps present in C_F_DATE_CONTRACT_DATA between beginDate and endDate
        List<CommodityFutureDateContractData> commodityFutureDateContractDataList = commodityFutureDateContractDataDao.getDateByCondition(beginDate, endDate);
        // Each entry only carries the transactionDate field; truncating to day
        // produces duplicates, so de-duplicate and return as List<Date>
        List<Date> dateTimeList = DateUtil.timestampToTransactionDateWithoutDuplicate(commodityFutureDateContractDataList);
        // Build the (expensive) mapper once instead of once per date
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateTimeList.size(); i++) {
            // All C_F_DATE_CONTRACT_DATA records of that transaction date
            List<CommodityFutureDateContractData> _commodityFutureDateContractDataList = commodityFutureDateContractDataDao.getCommodityFutureDateContractDataByDate(DateUtil.dateToString(dateTimeList.get(i)));
            if (_commodityFutureDateContractDataList.isEmpty()) {
                continue;
            }
            // File name part: replace characters that are illegal in file names
            String fileNamePart = DateUtil.dateToString(_commodityFutureDateContractDataList.get(0).getTransactionDate()).replace(":", "-").replace(".", "-");
            String path = JSON_DATA_BACKUP_DIR + TableName.C_F_DATE_CONTRACT_DATA + "\\" + TableName.C_F_DATE_CONTRACT_DATA + "-" + fileNamePart + ".json";
            // try-with-resources: guarantees the writer is closed and fixes the
            // NPE the original hit in the append loop when the FileWriter
            // constructor threw (fileWriter stayed null). The codehaus Jackson
            // exceptions extend IOException, so one catch covers all.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                for (int j = 0; j < _commodityFutureDateContractDataList.size(); j++) {
                    fileWriter.append(objectMapper.writeValueAsString(_commodityFutureDateContractDataList.get(j)) + "\n");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);
            // Delete the intermediate JSON file
            FileUtil.deleteFile(path);
        }

        logger.info("backup table C_F_DATE_CONTRACT_DATA incremental by json finish");
    }

    /**
     * Incrementally backs up table MDL_CLOSE_PRICE_MA5_GOLD_CROSS to JSON files
     * (one file per trade date found in the period), compresses each JSON file
     * into a zip archive and deletes the intermediate JSON file.
     *
     * @param beginDate start of the backup period (inclusive)
     * @param endDate   end of the backup period (inclusive)
     */
    public void backupModelClosePriceMA5GoldCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_CLOSE_PRICE_MA5_GOLD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // Trade dates present in MDL_CLOSE_PRICE_MA5_GOLD_CROSS between beginDate and endDate
        List<Date> dateList = modelClosePriceMA5GoldCrossDao.getDateByCondition(beginDate, endDate);
        // Build the (expensive) mapper once instead of once per date
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_CLOSE_PRICE_MA5_GOLD_CROSS records of that trade date
            List<ModelClosePriceMA5GoldCross> modelClosePriceMA5GoldCrossList = modelClosePriceMA5GoldCrossDao.getModelClosePriceMA5GoldCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Guard against an empty result: the original code would have thrown
            // IndexOutOfBoundsException on get(0)
            if (modelClosePriceMA5GoldCrossList.isEmpty()) {
                continue;
            }
            // Path and file name of the JSON backup file
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_CLOSE_PRICE_MA5_GOLD_CROSS + "\\" + TableName.MDL_CLOSE_PRICE_MA5_GOLD_CROSS + "-" + DateUtil.dateToString(modelClosePriceMA5GoldCrossList.get(0).getSellDate()) + ".json";
            // try-with-resources: the writer is opened once per file (the original
            // re-opened it for every record) and is always closed; the original
            // could NPE in its finally block when the constructor threw.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                for (int j = 0; j < modelClosePriceMA5GoldCrossList.size(); j++) {
                    fileWriter.append(objectMapper.writeValueAsString(modelClosePriceMA5GoldCrossList.get(j)) + "\n");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Delete the intermediate JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_CLOSE_PRICE_MA5_DEAD_CROSS to JSON files
     * (one file per trade date found in the period), compresses each JSON file
     * into a zip archive and deletes the intermediate JSON file.
     *
     * @param beginDate start of the backup period (inclusive)
     * @param endDate   end of the backup period (inclusive)
     */
    public void backupModelClosePriceMA5DeadCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_CLOSE_PRICE_MA5_DEAD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // Trade dates present in MDL_CLOSE_PRICE_MA5_DEAD_CROSS between beginDate and endDate
        List<Date> dateList = modelClosePriceMA5DeadCrossDao.getDateByCondition(beginDate, endDate);
        // Build the (expensive) mapper once instead of once per date
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_CLOSE_PRICE_MA5_DEAD_CROSS records of that trade date
            List<ModelClosePriceMA5DeadCross> modelClosePriceMA5DeadCrossList = modelClosePriceMA5DeadCrossDao.getModelClosePriceMA5DeadCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Guard against an empty result: the original code would have thrown
            // IndexOutOfBoundsException on get(0)
            if (modelClosePriceMA5DeadCrossList.isEmpty()) {
                continue;
            }
            // Path and file name of the JSON backup file
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_CLOSE_PRICE_MA5_DEAD_CROSS + "\\" + TableName.MDL_CLOSE_PRICE_MA5_DEAD_CROSS + "-" + DateUtil.dateToString(modelClosePriceMA5DeadCrossList.get(0).getSellDate()) + ".json";
            // try-with-resources: the writer is opened once per file (the original
            // re-opened it for every record) and is always closed; the original
            // could NPE in its finally block when the constructor threw.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                for (int j = 0; j < modelClosePriceMA5DeadCrossList.size(); j++) {
                    fileWriter.append(objectMapper.writeValueAsString(modelClosePriceMA5DeadCrossList.get(j)) + "\n");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Delete the intermediate JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Incrementally backs up table MDL_ALL_GOLD_CROSS to JSON files (one file
     * per trade date found in the period), compresses each JSON file into a zip
     * archive and deletes the intermediate JSON file.
     *
     * @param beginDate start of the backup period (inclusive)
     * @param endDate   end of the backup period (inclusive)
     */
    public void backupModelAllGoldCrossIncrementalByJson(String beginDate, String endDate) {
        logger.info("增量备份表MDL_ALL_GOLD_CROSS，时间为【" + beginDate + "】至【" + endDate + "】");

        // Trade dates present in MDL_ALL_GOLD_CROSS between beginDate and endDate
        List<Date> dateList = modelAllGoldCrossDao.getDateByCondition(beginDate, endDate);
        // Build the (expensive) mapper once instead of once per date
        ObjectMapper objectMapper = new ObjectMapper();
        for (int i = 0; i < dateList.size(); i++) {
            // All MDL_ALL_GOLD_CROSS records of that trade date
            List<ModelAllGoldCross> modelAllGoldCrossList = modelAllGoldCrossDao.getModelAllGoldCrossByDate(DateUtil.dateToString(dateList.get(i)));
            // Guard against an empty result: the original code would have thrown
            // IndexOutOfBoundsException on get(0)
            if (modelAllGoldCrossList.isEmpty()) {
                continue;
            }
            // Path and file name of the JSON backup file
            String path = JSON_DATA_BACKUP_DIR + TableName.MDL_ALL_GOLD_CROSS + "\\" + TableName.MDL_ALL_GOLD_CROSS + "-" + DateUtil.dateToString(modelAllGoldCrossList.get(0).getSellDate()) + ".json";
            // try-with-resources: the writer is opened once per file (the original
            // re-opened it for every record) and is always closed; the original
            // could NPE in its finally block when the constructor threw.
            try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
                for (int j = 0; j < modelAllGoldCrossList.size(); j++) {
                    fileWriter.append(objectMapper.writeValueAsString(modelAllGoldCrossList.get(j)) + "\n");
                }
            } catch (IOException e) {
                e.printStackTrace();
            }

            // Compress the JSON file into a zip archive
            ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
            zipCompressor.compress(path);

            // Delete the intermediate JSON file
            FileUtil.deleteFile(path);
            logger.info("删除文件【" + path + "】");
        }
    }

    /**
     * Backs up the whole BOARD table to a single JSON file, compresses it into
     * a zip archive and deletes the intermediate JSON file.
     */
    public void backupBoardIncrementalByJson() {
        logger.info("增量备份表BOARD");

        // All records of the BOARD table
        List<Board> boardList = boardDao.getAllBoard(Boolean.FALSE);
        // Path and file name of the JSON backup file
        String path = JSON_DATA_BACKUP_DIR + TableName.BOARD + "\\" + TableName.BOARD + ".json";
        ObjectMapper objectMapper = new ObjectMapper();
        // try-with-resources guarantees the writer is closed; the original could
        // NPE in its finally block when the FileWriter constructor threw
        // (fileWriter stayed null). The codehaus Jackson exceptions extend
        // IOException, so one catch clause covers all failure modes.
        try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
            // Serialize one record per line (JSON-lines format)
            for (int j = 0; j < boardList.size(); j++) {
                fileWriter.append(objectMapper.writeValueAsString(boardList.get(j)) + "\n");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Compress the JSON file into a zip archive
        ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
        zipCompressor.compress(path);

        // Delete the intermediate JSON file
        FileUtil.deleteFile(path);
        logger.info("删除文件【" + path + "】");
    }

    /**
     * Backs up the whole STOCK_INFO table to a single JSON file, compresses it
     * into a zip archive and deletes the intermediate JSON file.
     */
    public void backupStockInfoIncrementalByJson() {
        logger.info("增量备份表STOCK_INFO");

        // All records of the STOCK_INFO table
        List<StockInfo> stockInfoList = stockInfoDao.getAllStockInfo();
        // Path and file name of the JSON backup file
        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_INFO + "\\" + TableName.STOCK_INFO + ".json";
        ObjectMapper objectMapper = new ObjectMapper();
        // try-with-resources guarantees the writer is closed; the original could
        // NPE in its finally block when the FileWriter constructor threw
        // (fileWriter stayed null). The codehaus Jackson exceptions extend
        // IOException, so one catch clause covers all failure modes.
        try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
            // Serialize one record per line (JSON-lines format)
            for (int j = 0; j < stockInfoList.size(); j++) {
                fileWriter.append(objectMapper.writeValueAsString(stockInfoList.get(j)) + "\n");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Compress the JSON file into a zip archive
        ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
        zipCompressor.compress(path);

        // Delete the intermediate JSON file
        FileUtil.deleteFile(path);
        logger.info("删除文件【" + path + "】");
    }

    /**
     * Backs up the whole MODEL table to a single JSON file, compresses it into
     * a zip archive and deletes the intermediate JSON file.
     */
    public void backupModelIncrementalByJson() {
        logger.info("增量备份表MODEL");

        // All records of the MODEL table
        List<Model> modelList = modelDao.findAll();
        // Path and file name of the JSON backup file
        String path = JSON_DATA_BACKUP_DIR + TableName.MODEL + "\\" + TableName.MODEL + ".json";
        ObjectMapper objectMapper = new ObjectMapper();
        // try-with-resources guarantees the writer is closed; the original could
        // NPE in its finally block when the FileWriter constructor threw
        // (fileWriter stayed null). The codehaus Jackson exceptions extend
        // IOException, so one catch clause covers all failure modes.
        try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
            // Serialize one record per line (JSON-lines format)
            for (int j = 0; j < modelList.size(); j++) {
                fileWriter.append(objectMapper.writeValueAsString(modelList.get(j)) + "\n");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Compress the JSON file into a zip archive
        ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
        zipCompressor.compress(path);

        // Delete the intermediate JSON file
        FileUtil.deleteFile(path);
        logger.info("删除文件【" + path + "】");
    }

    /**
     * Backs up the whole FOREIGN_EXCHANGE table to a single JSON file,
     * compresses it into a zip archive and deletes the intermediate JSON file.
     */
    public void backupForeignExchangeIncrementalByJson() {
        logger.info("backup table FOREIGN_EXCHANGE incremental by json begin");

        // All records of the FOREIGN_EXCHANGE table
        List<ForeignExchange> foreignExchangeList = foreignExchangeDao.getAllForeignExchange();
        // Path and file name of the JSON backup file
        String path = JSON_DATA_BACKUP_DIR + TableName.FOREIGN_EXCHANGE + "\\" + TableName.FOREIGN_EXCHANGE + ".json";
        ObjectMapper objectMapper = new ObjectMapper();
        // try-with-resources guarantees the writer is closed and fixes the NPE
        // the original hit in the append loop when the FileWriter constructor
        // threw (fileWriter stayed null). The codehaus Jackson exceptions
        // extend IOException, so one catch clause covers all failure modes.
        try (FileWriter fileWriter = new FileWriter(new File(path), true)) {
            // Serialize one record per line (JSON-lines format)
            for (int j = 0; j < foreignExchangeList.size(); j++) {
                fileWriter.append(objectMapper.writeValueAsString(foreignExchangeList.get(j)) + "\n");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Compress the JSON file into a zip archive
        ZipCompressor zipCompressor = new ZipCompressor(path.replace(".json", ".zip"));
        zipCompressor.compress(path);
        // Delete the intermediate JSON file
        FileUtil.deleteFile(path);

        logger.info("backup table FOREIGN_EXCHANGE incremental by json finish");
    }

/*********************************************************************************************************************
 *
 * 									                         使用json文件恢复数据库表
 *
 *********************************************************************************************************************/
    /**
     * Restores the data of every configured database table from its JSON
     * backups. The table list and the day/week date windows are read from the
     * backup properties file; each matching table name dispatches to the
     * corresponding restore method.
     */
    public void restoreTableIncrementalByJson() {
        logger.info("开始恢复数据库的各个表中的数据");

        // Configuration: table list plus the day/week restore windows
        String tableNames = PropertiesUtil.getValue(BACKUP_PROPERTIES, "restore.database.table_name");
        String dayBeginDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "restore.database.day.begin_date");
        String dayEndDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "restore.database.day.end_date");
        String weekBeginDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "restore.database.week.begin_date");
        String weekEndDate = PropertiesUtil.getValue(BACKUP_PROPERTIES, "restore.database.week.end_date");

        // "[A, B, C]" -> {"A", "B", "C"}
        String[] tableNameArray = tableNames.replace("[", "").replace("]", "").replace(" ", "").split(",");
        for (String tableName : tableNameArray) {
            // Day-granularity tables
            if (TableName.STOCK_TRANSACTION_DATA.getName().trim().equals(tableName)) {
                this.restoreStockTransactionDataIncrementalByJson(dayBeginDate, dayEndDate);
            }
            if (TableName.STOCK_INDEX.getName().trim().equals(tableName)) {
                this.restoreStockIndexIncrementalByJson();
            }
            if (TableName.BOARD_INDEX.getName().trim().equals(tableName)) {
                this.restoreBoardIndexIncrementalByJson();
            }
            if (TableName.REPORT.getName().trim().equals(tableName)) {
                this.restoreReportIncrementalByJson();
            }
            if (TableName.STOCK_WEEK.getName().trim().equals(tableName)) {
                this.restoreStockWeekIncrementalByJson(weekBeginDate, weekEndDate);
            }
            if (TableName.STOCK_MONTH.getName().trim().equals(tableName)) {
                this.restoreStockMonthIncrementalByJson();
            }
            if (TableName.STOCK_INDEX_WEEK.getName().trim().equals(tableName)) {
                this.restoreStockIndexWeekIncrementalByJson();
            }

            // Model (signal) tables
            if (TableName.MDL_ALL_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelAllGoldCrossIncrementalByJson();
            }
            if (TableName.MDL_CLOSE_PRICE_MA5_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelClosePriceMA5GoldCrossIncrementalByJson();
            }
            if (TableName.MDL_CLOSE_PRICE_MA5_DEAD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelClosePriceMA5DeadCrossIncrementalByJson();
            }
            if (TableName.MDL_MACD_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelMACDGoldCrossIncrementalByJson();
            }
            if (TableName.MDL_MACD_DEAD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelMACDDeadCrossIncrementalByJson();
            }
            if (TableName.MDL_HEI_KIN_ASHI_UP_DOWN.getName().trim().equals(tableName)) {
                this.restoreModelHeiKinAshiUpDownIncrementalByJson();
            }
            if (TableName.MDL_HEI_KIN_ASHI_DOWN_UP.getName().trim().equals(tableName)) {
                this.restoreModelHeiKinAshiDownUpIncrementalByJson();
            }
            if (TableName.MDL_KD_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelKDGoldCrossIncrementalByJson();
            }
            if (TableName.MDL_KD_DEAD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelKDDeadCrossIncrementalByJson();
            }
            if (TableName.MDL_TOP_STOCK.getName().trim().equals(tableName)) {
                this.restoreModelTopStockIncrementalByJson();
            }
            if (TableName.MDL_TOP_STOCK_DETAIL.getName().trim().equals(tableName)) {
                this.restoreModelTopStockDetailIncrementalByJson();
            }
            if (TableName.MDL_STOCK_ANALYSIS.getName().trim().equals(tableName)) {
                this.restoreModelStockAnalysisIncrementalByJson();
            }
            if (TableName.MDL_STOCK_MONTH_ANALYSIS.getName().trim().equals(tableName)) {
                this.restoreModelStockMonthAnalysisIncrementalByJson();
            }

            if (TableName.MDL_WEEK_KD_GOLD_CROSS.getName().trim().equals(tableName)) {
                this.restoreModelWeekKDGoldCrossIncrementalByJson();
            }

            // Dictionary / full-table restores
            if (TableName.STOCK_INFO.getName().trim().equals(tableName)) {
                this.restoreStockInfoIncrementalByJson();
            }
            if (TableName.BOARD.getName().trim().equals(tableName)) {
                this.restoreBoardIncrementalByJson();
            }
            if (TableName.MODEL.getName().trim().equals(tableName)) {
                this.restoreModelIncrementalByJson();
            }
            if (TableName.FOREIGN_EXCHANGE.getName().trim().equals(tableName)) {
                this.restoreForeignExchangeIncrementalByJson();
            }
            if (TableName.FOREIGN_EXCHANGE_RECORD.getName().trim().equals(tableName)) {
                this.restoreForeignExchangeRecordByJson();
            }
        }

        logger.info("使用json恢复数据库的各个表中的数据完成");
    }

    /**
     * Restores table STOCK_TRANSACTION_DATA from the zipped JSON backups whose
     * embedded file date (the yyyyMMdd right after the first '-' in the file
     * name) falls within [beginTime, endTime]. Matching zips are decompressed,
     * each JSON line is deserialized and saved, then the JSON file is deleted.
     *
     * @param beginTime earliest file date to restore, format yyyyMMdd
     * @param endTime   latest file date to restore, format yyyyMMdd
     */
    public void restoreStockTransactionDataIncrementalByJson(String beginTime, String endTime) {
        logger.info("开始恢复表STOCK_TRANSACTION_DATA中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_TRANSACTION_DATA;
        // Parse the window once instead of once per file
        int fileBeginDate = Integer.parseInt(beginTime);
        int fileEndDate = Integer.parseInt(endTime);

        File[] files = new File(path).listFiles();
        // listFiles() returns null when the directory does not exist; the
        // original would have thrown a NullPointerException here
        if (files == null) {
            return;
        }
        for (File file : files) {
            // Check the extension BEFORE parsing the date so file names without
            // the expected "TABLE-yyyyMMdd" pattern cannot crash the parse
            String fileExtension = FilenameUtils.getExtension(file.getName());
            if (!"zip".equals(fileExtension)) {
                continue;
            }
            int fileDate = Integer.parseInt(file.getName().split("-")[1].substring(0, 8));
            if (fileDate >= fileBeginDate && fileDate <= fileEndDate) {
                try {
                    // Decompress the zip file back into a JSON file
                    ZipDecompressor.unZipFiles(file, path);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        // Re-list: the decompression step above produced new .json files
        File[] restoredFiles = new File(path).listFiles();
        if (restoredFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : restoredFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            int fileDate = Integer.parseInt(file.getName().split("-")[1].substring(0, 8));
            if (fileDate < fileBeginDate || fileDate > fileEndDate) {
                continue;
            }
            // try-with-resources fixes the reader leak in the original code
            // (FileReader/BufferedReader were never closed)
            try (BufferedReader br = new BufferedReader(new FileReader(file))) {
                String s;
                // One JSON object per line (JSON-lines format)
                while ((s = br.readLine()) != null) {
                    StockTransactionData stockTransactionData = mapper.readValue(s, StockTransactionData.class);
                    stockTransactionDataDao.save(stockTransactionData);
                }
            } catch (IOException e) {
                // Also covers JsonParseException / JsonMappingException, which
                // extend IOException in codehaus Jackson
                e.printStackTrace();
            }

            // Delete the intermediate JSON file
            FileUtil.deleteFile(file.toString());
        }
    }

    /**
     * Restores table STOCK_WEEK from the zipped JSON backups: every backup file
     * in the STOCK_WEEK directory is decompressed, each JSON line is
     * deserialized and saved, then the JSON file is deleted.
     * <p>
     * NOTE(review): the begin/end date filtering was commented out in the
     * original code and remains intentionally disabled — the parameters are
     * only used for logging, and ALL files in the directory are restored.
     *
     * @param beginDate start of the period (currently unused except for logging)
     * @param endDate   end of the period (currently unused except for logging)
     */
    public void restoreStockWeekIncrementalByJson(String beginDate, String endDate) {
        logger.info("恢复表STOCK_WEEK中的数据，日期为【" + beginDate + "】至【" + endDate + "】");

        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_WEEK;
        File[] files = new File(path).listFiles();
        // listFiles() returns null when the directory does not exist; the
        // original would have thrown a NullPointerException here
        if (files == null) {
            return;
        }
        for (File file : files) {
            try {
                // Decompress each zip file back into a JSON file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        // Re-list: the decompression step above produced new .json files
        File[] restoredFiles = new File(path).listFiles();
        if (restoredFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : restoredFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the original finally block, which
            // threw NullPointerException whenever the FileReader failed to open
            // (both reader variables were still null when close() was called)
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                // One JSON object per line (JSON-lines format)
                while ((s = bufferedReader.readLine()) != null) {
                    StockWeek stockWeek = mapper.readValue(s, StockWeek.class);
                    stockWeekDao.saveStockWeek(stockWeek);
                }
            } catch (IOException e) {
                // Also covers JsonParseException / JsonMappingException, which
                // extend IOException in codehaus Jackson
                e.printStackTrace();
            }

            // Delete the intermediate JSON file
            FileUtil.deleteFile(file.toString());
            logger.info("删除了文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table STOCK_MONTH from the zipped JSON backups under
     * {@code JSON_DATA_BACKUP_DIR + TableName.STOCK_MONTH}: each zip is decompressed,
     * every .json file is read line by line, deserialized into a {@code StockMonth}
     * and saved, then the .json file is deleted.
     */
    public void restoreStockMonthIncrementalByJson() {
        logger.info("恢复表STOCK_MONTH中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_MONTH;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    StockMonth stockMonth = mapper.readValue(s, StockMonth.class);
                    stockMonthDao.saveStockMonth(stockMonth);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除了文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table STOCK_INDEX from the zipped JSON backups under
     * {@code JSON_DATA_BACKUP_DIR + TableName.STOCK_INDEX}: each zip is decompressed,
     * every .json file is read line by line, deserialized into a {@code StockIndex}
     * and saved, then the .json file is deleted.
     */
    public void restoreStockIndexIncrementalByJson() {
        logger.info("恢复表STOCK_INDEX中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_INDEX;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // The old code never closed the BufferedReader (resource leak) and its
            // finally block NPE'd when the FileReader constructor threw;
            // try-with-resources fixes both.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    StockIndex stockIndex = mapper.readValue(s, StockIndex.class);
                    stockIndexDao.saveStockIndex(stockIndex);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table STOCK_INDEX_WEEK from the zipped JSON backups under
     * {@code JSON_DATA_BACKUP_DIR + TableName.STOCK_INDEX_WEEK}: each zip is
     * decompressed, every .json file is read line by line, deserialized into a
     * {@code StockIndexWeek} and saved, then the .json file is deleted.
     */
    public void restoreStockIndexWeekIncrementalByJson() {
        logger.info("恢复表STOCK_INDEX_WEEK中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_INDEX_WEEK;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    StockIndexWeek stockIndexWeek = mapper.readValue(s, StockIndexWeek.class);
                    stockIndexWeekDao.saveStockIndexWeek(stockIndexWeek);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table BOARD_INDEX from the zipped JSON backups under
     * {@code JSON_DATA_BACKUP_DIR + TableName.BOARD_INDEX}: each zip is decompressed,
     * every .json file is read line by line, deserialized into a {@code BoardIndex}
     * and saved, then the .json file is deleted.
     */
    public void restoreBoardIndexIncrementalByJson() {
        logger.info("恢复表BOARD_INDEX中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.BOARD_INDEX;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // The old code declared the readers OUTSIDE the loop and reused them
            // across iterations, and its finally block NPE'd if the very first
            // FileReader construction failed; one try-with-resources per file
            // fixes both problems.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    BoardIndex boardIndex = mapper.readValue(s, BoardIndex.class);
                    boardIndexDao.save(boardIndex);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table REPORT from the zipped JSON backups under
     * {@code JSON_DATA_BACKUP_DIR + TableName.REPORT}: each zip is decompressed,
     * every .json file is read line by line, deserialized into a {@code Report}
     * and inserted, then the .json file is deleted.
     */
    public void restoreReportIncrementalByJson() {
        logger.info("恢复表REPORT中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.REPORT;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    Report report = mapper.readValue(s, Report.class);
                    reportDao.insertReport(report);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_CLOSE_PRICE_MA5_GOLD_CROSS from the zipped JSON
     * backups: each zip is decompressed, every .json file is read line by line,
     * deserialized into a {@code ModelClosePriceMA5GoldCross} and saved, then the
     * .json file is deleted.
     */
    public void restoreModelClosePriceMA5GoldCrossIncrementalByJson() {
        logger.info("恢复表MDL_CLOSE_PRICE_MA5_GOLD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_CLOSE_PRICE_MA5_GOLD_CROSS;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelClosePriceMA5GoldCross modelClosePriceMA5GoldCross =
                            mapper.readValue(s, ModelClosePriceMA5GoldCross.class);
                    modelClosePriceMA5GoldCrossDao.save(modelClosePriceMA5GoldCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_CLOSE_PRICE_MA5_DEAD_CROSS from the zipped JSON
     * backups: each zip is decompressed, every .json file is read line by line,
     * deserialized into a {@code ModelClosePriceMA5DeadCross} and saved, then the
     * .json file is deleted.
     */
    public void restoreModelClosePriceMA5DeadCrossIncrementalByJson() {
        logger.info("恢复表MDL_CLOSE_PRICE_MA5_DEAD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_CLOSE_PRICE_MA5_DEAD_CROSS;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelClosePriceMA5DeadCross modelClosePriceMA5DeadCross =
                            mapper.readValue(s, ModelClosePriceMA5DeadCross.class);
                    modelClosePriceMA5DeadCrossDao.save(modelClosePriceMA5DeadCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_ALL_GOLD_CROSS from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelAllGoldCross} and saved, then the .json file is deleted.
     */
    public void restoreModelAllGoldCrossIncrementalByJson() {
        logger.info("恢复表MDL_ALL_GOLD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_ALL_GOLD_CROSS;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelAllGoldCross modelAllGoldCross = mapper.readValue(s, ModelAllGoldCross.class);
                    modelAllGoldCrossDao.save(modelAllGoldCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_MACD_GOLD_CROSS from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelMACDGoldCross} and saved, then the .json file is deleted.
     */
    public void restoreModelMACDGoldCrossIncrementalByJson() {
        logger.info("恢复表MDL_MACD_GOLD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_MACD_GOLD_CROSS;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelMACDGoldCross modelMACDGoldCross = mapper.readValue(s, ModelMACDGoldCross.class);
                    modelMACDGoldCrossDao.save(modelMACDGoldCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_MACD_DEAD_CROSS from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelMACDDeadCross} and saved, then the .json file is deleted.
     */
    public void restoreModelMACDDeadCrossIncrementalByJson() {
        logger.info("恢复表MDL_MACD_DEAD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_MACD_DEAD_CROSS;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelMACDDeadCross modelMACDDeadCross = mapper.readValue(s, ModelMACDDeadCross.class);
                    modelMACDDeadCrossDao.save(modelMACDDeadCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_HEI_KIN_ASHI_UP_DOWN from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelHeiKinAshiUpDown} and saved, then the .json file is deleted.
     */
    public void restoreModelHeiKinAshiUpDownIncrementalByJson() {
        logger.info("恢复表MDL_HEI_KIN_ASHI_UP_DOWN的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_HEI_KIN_ASHI_UP_DOWN;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelHeiKinAshiUpDown modelHeiKinAshiUpDown =
                            mapper.readValue(s, ModelHeiKinAshiUpDown.class);
                    modelHeiKinAshiUpDownDao.save(modelHeiKinAshiUpDown);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_HEI_KIN_ASHI_DOWN_UP from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelHeiKinAshiDownUp} and saved, then the .json file is deleted.
     */
    public void restoreModelHeiKinAshiDownUpIncrementalByJson() {
        logger.info("恢复表MDL_HEI_KIN_ASHI_DOWN_UP的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_HEI_KIN_ASHI_DOWN_UP;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelHeiKinAshiDownUp modelHeiKinAshiDownUp =
                            mapper.readValue(s, ModelHeiKinAshiDownUp.class);
                    modelHeiKinAshiDownUpDao.save(modelHeiKinAshiDownUp);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_TOP_STOCK from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelTopStock} and saved, then the .json file is deleted.
     */
    public void restoreModelTopStockIncrementalByJson() {
        logger.info("恢复表MDL_TOP_STOCK的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_TOP_STOCK;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelTopStock modelTopStock = mapper.readValue(s, ModelTopStock.class);
                    modelTopStockDao.save(modelTopStock);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores rows into table MDL_TOP_STOCK_DETAIL from the zipped JSON backups:
     * each zip is decompressed, every .json file is read line by line, deserialized
     * into a {@code ModelTopStockDetail} and saved, then the .json file is deleted.
     */
    public void restoreModelTopStockDetailIncrementalByJson() {
        logger.info("恢复表MDL_TOP_STOCK_DETAIL的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_TOP_STOCK_DETAIL;
        // Guard against listFiles() returning null (missing/unreadable directory).
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File zipFile : zipFiles) {
            try {
                // 将每一个zip文件解压为json文件
                ZipDecompressor.unZipFiles(zipFile, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which threw an NPE
            // when FileReader construction failed (bufferedReader still null).
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    ModelTopStockDetail modelTopStockDetail =
                            mapper.readValue(s, ModelTopStockDetail.class);
                    modelTopStockDetailDao.save(modelTopStockDetail);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subclasses
                e.printStackTrace();
            }

            // 删除json文件
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table MDL_STOCK_ANALYSIS from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + MDL_STOCK_ANALYSIS} is unzipped to a
     * .json file with one serialized {@code ModelStockAnalysis} per line; every line is
     * deserialized and saved via {@code modelStockAnalysisDao}, then the .json file is
     * deleted. Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreModelStockAnalysisIncrementalByJson() {
        logger.info("恢复表MDL_STOCK_ANALYSIS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_STOCK_ANALYSIS;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    ModelStockAnalysis modelStockAnalysis = mapper.readValue(s, ModelStockAnalysis.class);
                    modelStockAnalysisDao.save(modelStockAnalysis);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table MDL_STOCK_MONTH_ANALYSIS from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + MDL_STOCK_MONTH_ANALYSIS} is unzipped
     * to a .json file with one serialized {@code ModelStockMonthAnalysis} per line; every
     * line is deserialized and saved via {@code modelStockMonthAnalysisDao}, then the
     * .json file is deleted. Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreModelStockMonthAnalysisIncrementalByJson() {
        logger.info("恢复表MDL_STOCK_MONTH_ANALYSIS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_STOCK_MONTH_ANALYSIS;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    ModelStockMonthAnalysis modelStockMonthAnalysis = mapper.readValue(s, ModelStockMonthAnalysis.class);
                    modelStockMonthAnalysisDao.save(modelStockMonthAnalysis);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table MDL_WEEK_KD_GOLD_CROSS from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + MDL_WEEK_KD_GOLD_CROSS} is unzipped
     * to a .json file with one serialized {@code ModelWeekKDGoldCross} per line; every line
     * is deserialized and saved via {@code modelWeekKDGoldCrossDao}, then the .json file is
     * deleted. Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreModelWeekKDGoldCrossIncrementalByJson() {
        logger.info("恢复表MDL_WEEK_KD_GOLD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_WEEK_KD_GOLD_CROSS;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    ModelWeekKDGoldCross modelWeekKDGoldCross = mapper.readValue(s, ModelWeekKDGoldCross.class);
                    modelWeekKDGoldCrossDao.save(modelWeekKDGoldCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table MDL_KD_GOLD_CROSS from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + MDL_KD_GOLD_CROSS} is unzipped to a
     * .json file with one serialized {@code ModelKDGoldCross} per line; every line is
     * deserialized and saved via {@code modelKDGoldCrossDao}, then the .json file is
     * deleted. Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreModelKDGoldCrossIncrementalByJson() {
        logger.info("恢复表MDL_KD_GOLD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_KD_GOLD_CROSS;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    ModelKDGoldCross modelKDGoldCross = mapper.readValue(s, ModelKDGoldCross.class);
                    modelKDGoldCrossDao.save(modelKDGoldCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table MDL_KD_DEAD_CROSS from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + MDL_KD_DEAD_CROSS} is unzipped to a
     * .json file with one serialized {@code ModelKDDeadCross} per line; every line is
     * deserialized and saved via {@code modelKDDeadCrossDao}, then the .json file is
     * deleted. Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreModelKDDeadCrossIncrementalByJson() {
        logger.info("恢复表MDL_KD_DEAD_CROSS的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MDL_KD_DEAD_CROSS;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    ModelKDDeadCross modelKDDeadCross = mapper.readValue(s, ModelKDDeadCross.class);
                    modelKDDeadCrossDao.save(modelKDDeadCross);
                }
            } catch (IOException e) {
                // JsonParseException / JsonMappingException are IOException subtypes
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table FOREIGN_EXCHANGE_RECORD from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + FOREIGN_EXCHANGE_RECORD} is unzipped
     * to a .json file with one serialized {@code ForeignExchangeRecord} per line; every
     * line is deserialized and persisted via
     * {@code foreignExchangeRecordDao.restoreForeignExchangeRecord}, then the .json file
     * is deleted. Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreForeignExchangeRecordByJson() {
        logger.info("恢复表FOREIGN_EXCHANGE_RECORD的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.FOREIGN_EXCHANGE_RECORD;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources fixes the original resource leak: the old code
            // never closed its FileReader/BufferedReader at all.
            try (BufferedReader br = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = br.readLine()) != null) {
                    // one JSON document per line
                    ForeignExchangeRecord fer = mapper.readValue(s, ForeignExchangeRecord.class);
                    foreignExchangeRecordDao.restoreForeignExchangeRecord(fer);
                }
            } catch (IOException e) {
                // covers FileNotFoundException and Jackson's parse/mapping exceptions
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
        }

        logger.info("insert the data of table FOREIGN_EXCHANGE_RECORD incremental by json finish");
    }

    /**
     * Restores the data of table BOARD from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + BOARD} is unzipped to a .json file
     * with one serialized {@code Board} per line; every line is deserialized and saved via
     * {@code boardDao.saveBoard}, then the .json file is deleted. Errors are logged
     * (printStackTrace) and processing continues.
     */
    public void restoreBoardIncrementalByJson() {
        logger.info("恢复表BOARD中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.BOARD;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    Board b = mapper.readValue(s, Board.class);
                    boardDao.saveBoard(b);
                }
            } catch (IOException e) {
                // covers FileNotFoundException and Jackson's parse/mapping exceptions
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table STOCK_INFO from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + STOCK_INFO} is unzipped to a .json
     * file with one serialized {@code StockInfo} per line; every line is deserialized and
     * saved via {@code stockInfoDao.saveStockInfo}, then the .json file is deleted.
     * Errors are logged (printStackTrace) and processing continues.
     */
    public void restoreStockInfoIncrementalByJson() {
        logger.info("恢复表STOCK_INFO中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.STOCK_INFO;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    StockInfo stockInfo = mapper.readValue(s, StockInfo.class);
                    stockInfoDao.saveStockInfo(stockInfo);
                }
            } catch (IOException e) {
                // covers FileNotFoundException and Jackson's parse/mapping exceptions
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table MODEL from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + MODEL} is unzipped to a .json file
     * with one serialized {@code Model} per line; every line is deserialized and saved via
     * {@code modelDao.saveModel}, then the .json file is deleted. Errors are logged
     * (printStackTrace) and processing continues.
     */
    public void restoreModelIncrementalByJson() {
        logger.info("恢复表MODEL中的数据");

        String path = JSON_DATA_BACKUP_DIR + TableName.MODEL;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources replaces the old finally block, which could NPE
            // when the FileReader constructor threw before assignment.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = bufferedReader.readLine()) != null) {
                    // one JSON document per line
                    Model model = mapper.readValue(s, Model.class);
                    modelDao.saveModel(model);
                }
            } catch (IOException e) {
                // covers FileNotFoundException and Jackson's parse/mapping exceptions
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
            logger.info("删除文件【" + file.toString() + "】");
        }
    }

    /**
     * Restores the data of table FOREIGN_EXCHANGE from its zipped JSON backups.
     *
     * <p>Each zip under {@code JSON_DATA_BACKUP_DIR + FOREIGN_EXCHANGE} is unzipped to a
     * .json file with one serialized {@code ForeignExchange} per line; every line is
     * deserialized and inserted via {@code foreignExchangeDao.insertForeignExchange},
     * then the .json file is deleted. Errors are logged (printStackTrace) and processing
     * continues.
     */
    public void restoreForeignExchangeIncrementalByJson() {
        logger.info("insert the data of table FOREIGN_EXCHANGE incremental by json begin");

        String path = JSON_DATA_BACKUP_DIR + TableName.FOREIGN_EXCHANGE;
        // guard against null from listFiles() (missing path / not a directory)
        File[] zipFiles = new File(path).listFiles();
        if (zipFiles == null) {
            return;
        }
        for (File file : zipFiles) {
            try {
                // decompress each zip archive into a json file
                ZipDecompressor.unZipFiles(file, path);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        File[] jsonFiles = new File(path).listFiles();
        if (jsonFiles == null) {
            return;
        }
        ObjectMapper mapper = new ObjectMapper();
        for (File file : jsonFiles) {
            if (!file.getName().endsWith(".json")) {
                continue;
            }
            // try-with-resources fixes the original resource leak: the old code
            // never closed its FileReader/BufferedReader at all.
            try (BufferedReader br = new BufferedReader(new FileReader(file))) {
                String s;
                while ((s = br.readLine()) != null) {
                    // one JSON document per line
                    ForeignExchange fe = mapper.readValue(s, ForeignExchange.class);
                    foreignExchangeDao.insertForeignExchange(fe);
                }
            } catch (IOException e) {
                // covers FileNotFoundException and Jackson's parse/mapping exceptions
                e.printStackTrace();
            }

            // delete the json file
            FileUtil.deleteFile(file.toString());
        }

        logger.info("insert the data of table FOREIGN_EXCHANGE incremental by json finish");
    }

/*****************************************************************************************************************
 *
 * 									发送邮件
 *
 ******************************************************************************************************************/
    /**
     * Sends the daily suggestion e-mail.
     *
     * <p>Mail host, protocol, auth flag, credentials and subject are read from
     * {@code PROJECT_PROPERTIES}. The message body is the HTML content of the file
     * {@code PRINT_REAL_BUY_SELL_SUGGESTION}, sent to the sender's own address with the
     * configured subject suffixed by today's date. All failures are logged
     * (printStackTrace) rather than propagated.
     */
    @Override
    public void sendEMail() {
        logger.info("开始发送邮件");

        // configuration values
        String mailHost = PropertiesUtil.getValue(PROJECT_PROPERTIES, "mail.host");
        String mailTransportProtocol = PropertiesUtil.getValue(PROJECT_PROPERTIES, "mail.transport.protocol");
        String mailSmtpAuth = PropertiesUtil.getValue(PROJECT_PROPERTIES, "mail.smtp.auth");
        String mailUsername = PropertiesUtil.getValue(PROJECT_PROPERTIES, "mail.username");
        String mailPassword = PropertiesUtil.getValue(PROJECT_PROPERTIES, "mail.password");
        String mailSubject = PropertiesUtil.getValue(PROJECT_PROPERTIES, "mail.realTransactionMain.subject");

        Properties prop = new Properties();
        // mail server
        prop.setProperty("mail.host", mailHost);
        // transport protocol
        prop.setProperty("mail.transport.protocol", mailTransportProtocol);
        // whether username/password authentication is required
        prop.setProperty("mail.smtp.auth", mailSmtpAuth);

        Transport ts = null;
        try {
            // 1. session carrying the environment plus the sender's credentials
            Session session = Session.getDefaultInstance(prop, new Authenticator() {
                        public PasswordAuthentication getPasswordAuthentication() {
                            // sender username and authorization code
                            return new PasswordAuthentication(mailUsername, mailPassword);
                        }
                    }
            );

            // debug mode shows the SMTP conversation in the log
            session.setDebug(true);

            // 2. obtain the transport and 3. connect with username/authorization code
            ts = session.getTransport();
            ts.connect(mailHost, mailUsername, mailPassword);

            // 4. build the message: sender, recipient (self), dated subject
            MimeMessage message = new MimeMessage(session);
            message.setFrom(new InternetAddress(mailUsername));
            message.setRecipient(Message.RecipientType.TO, new InternetAddress(mailUsername));
            message.setSubject(mailSubject + "_" + DateUtil.dateToString(new Date()));

            // body: HTML suggestion file; try-with-resources closes the reader,
            // which the original code leaked
            StringBuilder content = new StringBuilder();
            try (FileReader fileReader = new FileReader(new File(PRINT_REAL_BUY_SELL_SUGGESTION))) {
                char[] buf = new char[128];
                int len;
                while ((len = fileReader.read(buf)) != -1) {
                    content.append(buf, 0, len);
                }
            }
            message.setContent(content.toString(), "text/html;charset=UTF-8");

            // 5. send
            ts.sendMessage(message, message.getAllRecipients());
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // null check fixes the NPE the original threw when getTransport()/connect()
            // failed before ts was assigned
            if (ts != null) {
                try {
                    ts.close();
                } catch (MessagingException e) {
                    e.printStackTrace();
                }
            }
        }

        logger.info("发送邮件结束");
    }

    /**
     * Unimplemented stub: always returns {@code null}, ignoring the input list.
     *
     * NOTE(review): callers currently receive null rather than an empty string —
     * confirm no caller depends on that before supplying a real implementation.
     */
    @Override
    public String listToString(List list) {
        // TODO Auto-generated method stub
        return null;
    }

}
