/*
 * Unidata Platform Community Edition
 * Copyright (c) 2013-2020, UNIDATA LLC, All rights reserved.
 * This file is part of the Unidata Platform Community Edition software.
 *
 * Unidata Platform Community Edition is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Unidata Platform Community Edition is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */
package org.unidata.mdm.meta.service.impl;

import static org.unidata.mdm.core.configuration.UserMessageConstants.DATA_IMPORT_METADATA_FAILED;
import static org.unidata.mdm.meta.type.InputOutputModelStructureConstants.MEASURE;
import static org.unidata.mdm.meta.type.InputOutputModelStructureConstants.SECURITY;
import static org.unidata.mdm.meta.type.InputOutputModelStructureConstants.XML_EXTENSION;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.annotation.PostConstruct;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.traverse.TopologicalOrderIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionFailedException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;
import org.unidata.mdm.core.configuration.UserMessageConstants;
import org.unidata.mdm.core.context.ModelChangeContext;
import org.unidata.mdm.core.context.UpsertUserEventRequestContext;
import org.unidata.mdm.core.exception.SystemRuntimeException;
import org.unidata.mdm.core.service.MetaModelService;
import org.unidata.mdm.core.service.SecurityService;
import org.unidata.mdm.core.service.UserService;
import org.unidata.mdm.core.util.SecurityJaxbUtils;
import org.unidata.mdm.core.util.SecurityUtils;
import org.unidata.mdm.meta.configuration.Descriptors;
import org.unidata.mdm.meta.constant.IEConstants;
import org.unidata.mdm.meta.context.UpsertDataModelContext;
import org.unidata.mdm.meta.context.UpsertDataModelContext.UpsertDataModelContextBuilder;
import org.unidata.mdm.meta.dto.FullModelDTO;
import org.unidata.mdm.meta.exception.MetaExceptionIds;
import org.unidata.mdm.meta.exception.ModelImportExportException;
import org.unidata.mdm.meta.service.MetaModelImportService;
import org.unidata.mdm.meta.service.SecurityIOService;
import org.unidata.mdm.meta.service.impl.data.instance.EntitiesGroupImpl;
import org.unidata.mdm.meta.service.impl.data.instance.RelationImpl;
import org.unidata.mdm.meta.type.event.MetaModelImportEvent;
import org.unidata.mdm.meta.type.input.meta.GroupEdge;
import org.unidata.mdm.meta.type.input.meta.GroupGraph;
import org.unidata.mdm.meta.type.input.meta.GroupVertex;
import org.unidata.mdm.meta.type.input.meta.MetaAction;
import org.unidata.mdm.meta.type.input.meta.MetaExistence;
import org.unidata.mdm.meta.type.input.meta.MetaGraph;
import org.unidata.mdm.meta.type.input.meta.MetaType;
import org.unidata.mdm.meta.type.input.meta.MetaVertex;
import org.unidata.mdm.meta.type.model.DataModel;
import org.unidata.mdm.meta.type.model.entities.EntitiesGroup;
import org.unidata.mdm.meta.type.model.entities.Entity;
import org.unidata.mdm.meta.type.model.entities.LookupEntity;
import org.unidata.mdm.meta.type.model.entities.NestedEntity;
import org.unidata.mdm.meta.type.model.entities.Relation;
import org.unidata.mdm.meta.type.model.enumeration.EnumerationType;
import org.unidata.mdm.meta.type.model.measurement.MeasurementCategory;
import org.unidata.mdm.meta.type.model.measurement.MeasurementUnitsModel;
import org.unidata.mdm.meta.type.model.sourcesystem.SourceSystem;
import org.unidata.mdm.meta.util.MetaModelGraphComponent;
import org.unidata.mdm.meta.util.ModelUtils;
import org.unidata.mdm.meta.util.ZipUtils;
import org.unidata.mdm.system.exception.PlatformBusinessException;
import org.unidata.mdm.system.serialization.xml.XmlObjectSerializer;
import org.unidata.mdm.system.service.EventService;
import org.unidata.mdm.system.service.ExecutionService;
import org.unidata.mdm.system.service.RenderingService;
import org.unidata.mdm.system.util.TextUtils;

import com.google.common.base.Functions;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.topic.ITopic;
import com.unidata.mdm.security.Security;

@Service
public class MetaModelImportServiceImpl implements MetaModelImportService {

    /** Meta model CRUD facade used for all upserts. */
    @Autowired
    private MetaModelService metaModelService;

    /** Builds/enriches import preview graphs. */
    @Autowired
    private MetaModelGraphComponent metaModelGraphComponent;

    @Autowired
    private SecurityService securityService;

    @Autowired
    private UserService userService;

    /** Imports roles/users shipped with the archive. */
    @Autowired
    private SecurityIOService securityIOService;

    @Autowired
    private PlatformTransactionManager txManager;

    @Autowired
    private RenderingService renderingService;

    @Autowired
    private ExecutionService executionService;

    @Autowired
    private EventService eventService;

    private static final String META_MODEL_IMPORT_EVENT = "META_MODEL_IMPORT_EVENT";

    /**
     * The to load. Parsed model previews keyed by import operation id.
     * NOTE(review): plain HashMap, but it is touched from the Hazelcast topic
     * listener as well — confirm single-threaded access or switch to ConcurrentHashMap.
     */
    private Map<String, FullModelDTO> toLoad = new HashMap<>();

    /**
     * The topic. Cluster-wide topic used to forward import graphs between nodes.
     */
    private ITopic<MetaGraph> topic;

    @Autowired
    private HazelcastInstance hazelcastInstance;

    // Loggers are constants: declared static final.
    private static final Logger LOGGER = LoggerFactory.getLogger(MetaModelImportServiceImpl.class);

    private static final String MODEL = "model";
    public static final String META_MODEL_PATH = "meta_model";

    /**
     * Subscribes this node to the shared meta import topic once the bean is built,
     * so graphs published by other cluster members can be handled locally.
     */
    @PostConstruct
    public void post() {
        final String localMemberId = hazelcastInstance.getCluster().getLocalMember().getUuid().toString();
        topic = hazelcastInstance.getTopic("metaIETopic");
        topic.addMessageListener(new MetaImportListener(this, securityService, userService, localMemberId));
    }


    /**
     * Imports a serialized {@link DataModel} from the given XML stream, firing
     * maintenance start/stop events around the upsert.
     *
     * @param inputStream XML stream containing the model
     * @param recreate    if {@code true} the model fully replaces the current one,
     *                    otherwise it is merged into it
     */
    @Override
    public void importModel(final InputStream inputStream, final boolean recreate) {
        try {
            eventService.fire(new MetaModelImportEvent(MetaModelImportEvent.START_MAINTENANCE_STATUS,
                    META_MODEL_IMPORT_EVENT,
                    MetaModelImportEvent.OPERATION_ID));

            final DataModel model = XmlObjectSerializer.getInstance()
                    .fromXmlInputStream(DataModel.class, inputStream);

            // TODO restore enumerations/source-systems import when those services are back.
            final UpsertDataModelContext updateModelRequestContext =
                    UpsertDataModelContext.builder()
                            .nestedEntitiesUpdate(model.getNestedEntities())
                            .lookupEntitiesUpdate(model.getLookupEntities())
                            .entitiesGroupsUpdate(model.getEntitiesGroup())
                            .entitiesUpdate(model.getEntities())
                            .relationsUpdate(model.getRelations())
                            .upsertType(recreate
                                    ? ModelChangeContext.ModelChangeType.FULL
                                    : ModelChangeContext.ModelChangeType.MERGE)
                            .build();
            metaModelService.upsert(updateModelRequestContext);
        } catch (IOException e) {
            // Domain exception (still unchecked), consistent with the rest of this service,
            // instead of a bare RuntimeException that loses the error code.
            throw new SystemRuntimeException("Unable to parse model input stream.", e,
                    MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, "model");
        } finally {
            // Always leave maintenance mode, even on failure.
            eventService.fire(new MetaModelImportEvent(MetaModelImportEvent.STOP_MAINTENANCE_STATUS,
                    META_MODEL_IMPORT_EVENT,
                    MetaModelImportEvent.OPERATION_ID));
        }
    }

    /**
     * Imports measurement units from the given stream.
     * <p>
     * Currently a no-op: the measurement-service integration is disabled pending
     * restoration. The dead try/catch that wrapped an empty body was removed.
     *
     * @param measureUnitsInputStream XML stream with measurement units (currently ignored)
     */
    @Override
    public void importMeasureUnits(final InputStream measureUnitsInputStream) {
        // TODO restore: parse a MeasurementUnitsModel from the stream and save
        // each MeasurementCategory via the measurement service.
    }

    /**
     * Parses an unpacked import tree into a {@link FullModelDTO}, cleans up the
     * temporary files and caches the result under the given id so a later
     * preview/apply REST call can pick it up.
     */
    @Override
    public FullModelDTO prepareUploadData(Path rootFolder, Path pathToZipFile, boolean isOverride, String fileName, String id) {
        // if old model shouldn't be preserved no need to collect dependencies
        final FullModelDTO preview = new FullModelDTO();
        preview.setOverride(isOverride);

        final Path metaPath = Paths.get(rootFolder.toString(), META_MODEL_PATH);
        try {
            modelProcess(metaPath, isOverride, fileName, preview);
        } finally {
            cleanupTree(pathToZipFile, metaPath);
        }

        // save DTO at cache - to apply rest query
        cacheModelPreview(id, preview);
        return preview;
    }

    private void modelProcess(Path rootFolder, boolean isOverride, String fileName, FullModelDTO fullModelDTOToCache) {
        try {
            parseModel(isOverride, fullModelDTOToCache, rootFolder);
        } catch (ConversionFailedException e) {
            processConversionFailedException(fileName, e);
        } catch (Exception e) {
            processSystemRuntimeException(
                    fileName,
                    new SystemRuntimeException(
                            "Unable to parse zip file.", e,
                            MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, fileName
                    )
            );
        }
    }

    /**
     * Unzips the uploaded archive into the import working directory and returns
     * the root of the unpacked tree.
     *
     * @param pathToZipFile uploaded archive
     * @param fileName      original archive name, used for error reporting
     * @return root folder of the unpacked archive
     */
    @Override
    public Path getRootPath(Path pathToZipFile, String fileName) {
        try {
            return ZipUtils.unzipDir(pathToZipFile, Paths.get(IEConstants.IMPORT_PATH.toString(), MODEL));
        } catch (IOException e) {
            // Cleans the import directory and always rethrows, so control never
            // reaches the statement below.
            processSystemRuntimeException(
                    fileName,
                    new SystemRuntimeException(
                            "Unable to parse zip file.", e,
                            MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, fileName
                    )
            );

            //todo : check strange exception logic

            // Unreachable in practice; present only to satisfy the compiler's
            // definite-return analysis.
            throw new RuntimeException(e);
        }
    }

    /**
     * Caches a parsed model preview under the given id so a later preview/apply
     * call can retrieve it.
     *
     * @param id      cache key (import operation id)
     * @param fromZip parsed model DTO
     */
    public void cacheModelPreview(String id, FullModelDTO fromZip) {
        toLoad.put(id, fromZip);
    }

    /**
     * Clears the import working directory, then rethrows the given exception.
     * If the cleanup itself fails, that failure is reported instead.
     */
    private void processSystemRuntimeException(String fileName, SystemRuntimeException e) {
        try {
            deleteDirectoryContent(IEConstants.IMPORT_PATH);
        } catch (IOException cleanupFailure) {
            throw new SystemRuntimeException("Unable to parse zip file.", cleanupFailure,
                    MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, fileName);
        }
        throw e;
    }

    /**
     * Clears the import working directory, then rethrows: a
     * {@link PlatformBusinessException} cause is unwrapped and rethrown as-is,
     * anything else is wrapped into a {@link SystemRuntimeException}.
     */
    private void processConversionFailedException(String fileName, ConversionFailedException e) {
        try {
            deleteDirectoryContent(IEConstants.IMPORT_PATH);
        } catch (IOException cleanupFailure) {
            throw new SystemRuntimeException("Unable to parse zip file.", cleanupFailure,
                    MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, fileName);
        }
        // hack for business exception throwed from spring converter.
        if (e.getCause() instanceof PlatformBusinessException) {
            throw (PlatformBusinessException) e.getCause();
        }
        throw new SystemRuntimeException(
                "Unable to parse zip file.",
                e,
                MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE,
                fileName);
    }

    /**
     * Enriches a preview graph with the currently deployed model and measurement
     * values, marking them as already existing. Does nothing in override mode,
     * where the current model is about to be replaced anyway.
     */
    @Override
    public void fillResponseMetaGraphWithCurrentData(MetaGraph result) {
        if (result.isOverride()) {
            return;
        }

        FullModelDTO current = new FullModelDTO()
                .withMeasurementValues(assembleMeasurement(null))
                .withModel(assembleModel(null));

        metaModelGraphComponent.enrich(current, result, MetaExistence.EXIST, MetaType.values());
    }

    /**
     * Recursively removes everything inside {@code toImport}, leaving the
     * directory itself in place (a plain file argument is deleted outright,
     * matching the visitor-based original).
     */
    private static void deleteDirectoryContent(Path toImport) throws IOException {
        try (Stream<Path> tree = Files.walk(toImport)) {
            // Deepest entries first so directories are empty by the time we reach them.
            List<Path> deepestFirst = tree
                    .sorted((a, b) -> b.compareTo(a))
                    .collect(Collectors.toList());
            for (Path entry : deepestFirst) {
                if (!entry.equals(toImport) || !Files.isDirectory(entry)) {
                    Files.delete(entry);
                }
            }
        }
    }

    /**
     * Returns the currently deployed measurement units model.
     *
     * @param storageId storage id; {@code null} presumably selects the default storage — TODO confirm
     * @return current measurement units source model
     */
    private MeasurementUnitsModel assembleMeasurement(String storageId) {
        return metaModelService.instance(Descriptors.MEASUREMENT_UNITS, storageId, null).toSource();
    }

    /**
     * Returns the currently deployed data model.
     *
     * @param storageId storage id; {@code null} presumably selects the default storage — TODO confirm
     * @return current data source model
     */
    private DataModel assembleModel(String storageId) {
        return metaModelService.instance(Descriptors.DATA, storageId, null).toSource();
    }

    /**
     * Best-effort removal of the unpacked import tree (including the root folder)
     * and the uploaded zip file. Failures are logged, never rethrown.
     */
    @Override
    public void cleanupTree(Path zipFile, Path rootFolder) {
        try (Stream<Path> tree = Files.walk(rootFolder)) {
            // Deepest entries first so each directory is empty before deletion.
            List<Path> deepestFirst = tree
                    .sorted((a, b) -> b.compareTo(a))
                    .collect(Collectors.toList());
            for (Path entry : deepestFirst) {
                Files.deleteIfExists(entry);
            }
            Files.deleteIfExists(zipFile);
        } catch (IOException e) {
            LOGGER.error("Unable to delete temporary metamodel export files", e);
        }
    }

    /**
     * Enriches the given graph with the model preloaded for its id.
     *
     * @param metaGraph graph to enrich; its id keys the preloaded model cache
     * @param aNew      requested existence marker
     * @param values    requested meta types
     */
    @Override
    public void enrich(MetaGraph metaGraph, MetaExistence aNew, MetaType[] values) {

        // NOTE(review): the aNew and values parameters are ignored — the call
        // below always passes MetaExistence.NEW and all MetaType values.
        // Confirm whether any caller relies on passing different arguments.
        FullModelDTO fullModelDTO = toLoad.get(metaGraph.getId());

        metaModelGraphComponent.enrich(
                fullModelDTO,
                metaGraph,
                MetaExistence.NEW,
                MetaType.values()
        );
    }

    private void parseModel(boolean isOverride, FullModelDTO fullModelDTOToCache, Path rootFolder) throws IOException {

        Path modelFolder = Paths.get(rootFolder.toString(), MODEL);

        if (Files.exists(modelFolder)) {
            try (Stream<Path> stream = Files.list(modelFolder)) {
                Optional<Path> modelFile = stream.findAny();

                if (modelFile.isPresent()) {
                    try (InputStream is = new FileInputStream(modelFile.get().toFile())) {

                        DataModel model = XmlObjectSerializer.getInstance()
                                .fromXmlInputStream(DataModel.class, is);

                        // Note, that we filter nested entities on this step to avoid add/update non-referenced NestedEntities.
                        model.setNestedEntities(
                                ModelUtils.filterUsageNestedEntities(
                                        model.getNestedEntities(),
                                        model.getEntities())
                        );
                        fullModelDTOToCache.withModel(model);
                    }
                } else if (isOverride) {
                    DataModel model = createDefaultModel();
                    fullModelDTOToCache.withModel(model);
                }
            }

        } else if (isOverride) {
            DataModel model = createDefaultModel();
            fullModelDTOToCache.withModel(model);
        }

        Path measurementFolder = Paths.get(rootFolder.toString(), MEASURE);
        if (Files.exists(measurementFolder)) {
            try (Stream<Path> stream = Files.list(measurementFolder)) {
                Optional<Path> measurementFile = stream.findAny();
                if (measurementFile.isPresent()) {
                    try (InputStream is = new FileInputStream(measurementFile.get().toFile())) {
                        MeasurementUnitsModel measurementValues = XmlObjectSerializer.getInstance()
                                .fromXmlInputStream(MeasurementUnitsModel.class, is);
                        fullModelDTOToCache.withMeasurementValues(measurementValues);
                    }
                }
            }
        }

        Path securityFolder = Paths.get(rootFolder.toString(), SECURITY);
        if (Files.exists(securityFolder)) {
            try (Stream<Path> stream = Files.list(securityFolder)) {
                Optional<Path> securityFile = stream.findAny();
                if (securityFile.isPresent()) {
                    try (InputStream is = new FileInputStream(securityFile.get().toFile())) {
                        Security security = SecurityJaxbUtils.createSecurityFromInputStream(is);
                        fullModelDTOToCache.withSecurity(security);
                    }
                }
            }
        }
    }

    /**
     * Builds a minimal default model containing only the default entities group.
     * Used when an override import ships no model file.
     */
    private DataModel createDefaultModel() {
        return new DataModel()
//                .withSourceSystems(Collections.singletonList(ModelUtils.createDefaultSourceSystem()))
                .withEntitiesGroup(ModelUtils.createDefaultEntitiesGroup());

    }


    /**
     * Validates the unpacked import tree structure, collecting human-readable
     * messages for unknown file extensions and duplicated files.
     *
     * @param rootFolder root of the unpacked archive
     * @param fileName   original archive name, used in error reporting
     * @return validation messages (empty when the structure is fine)
     */
    @Override
    public List<String> validateImportedStructure(Path rootFolder, String fileName) {

        List<String> messages = new ArrayList<>();

        try {
            // Use Paths.get varargs instead of manual "/" concatenation so the
            // separator is always platform-correct.
            Path modelFolder = Paths.get(rootFolder.toString(), META_MODEL_PATH, MODEL);
            Path measureFolder = Paths.get(rootFolder.toString(), META_MODEL_PATH, MEASURE);
            Path securityFolder = Paths.get(rootFolder.toString(), META_MODEL_PATH, SECURITY);

            // model
            checkFolder(rootFolder, messages, modelFolder);
            // measure
            checkFolder(rootFolder, messages, measureFolder);
            // security (was mislabeled "measure" before)
            checkFolder(rootFolder, messages, securityFolder);
        } catch (Exception e) {
            processSystemRuntimeException(
                    fileName,
                    new SystemRuntimeException(
                            "Unable to parse zip file.", e,
                            MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, fileName
                    )
            );
        }

        return messages;
    }

    /**
     * Validates a single import subfolder: flags unknown extensions and
     * complains when more than one file is present.
     */
    private void checkFolder(Path root, List<String> messages, Path toCheck) throws IOException {
        if (!Files.exists(toCheck)) {
            return;
        }
        checkExtension(root, messages, toCheck, XML_EXTENSION);
        try (Stream<Path> stream = Files.list(toCheck)) {
            List<Path> entries = stream.collect(Collectors.toList());
            if (entries.size() > 1) {
                String relative = StringUtils.substringAfter(toCheck.toString(), root.toString());
                messages.add(TextUtils.getText(
                        MetaExceptionIds.EX_META_IMPORT_MODEL_FILE_DUPL_NOT_ALLOWED.code(),
                        relative, entries.size()));
            }
        }
    }

    /**
     * Adds a message for every file in {@code toCheck} whose name does not end
     * with the expected extension.
     */
    private void checkExtension(Path root, List<String> messages, Path toCheck, String extension) throws IOException {
        try (Stream<Path> entries = Files.list(toCheck)) {
            entries
                    .filter(entry -> !StringUtils.endsWith(entry.toString(), extension))
                    .forEach(entry -> messages.add(TextUtils.getText(
                            MetaExceptionIds.EX_META_IMPORT_MODEL_FILE_UNKNOWN.code(),
                            StringUtils.substringAfter(entry.toString(), root.toString()),
                            StringUtils.substringAfter(toCheck.toString(), root.toString()),
                            extension)));
        }
    }


    /**
     * Applies a previously prepared import graph: upserts measurement values,
     * the data model and (optionally) security objects, then cleans up and
     * writes an audit user event.
     *
     * @param graph the import graph; must reference a model cached on this node
     * @return the same graph instance
     */
    @Override
    public MetaGraph apply(MetaGraph graph) {

        // Model not preloaded here: the graph is forwarded to the owning node.
        if (mastBeRejected(graph)) return graph;

        // TODO: 21.04.2020  maintenanceService to maintenance mode

        boolean isExc = false;

        try {

            processMeasurementValues(graph);


            if (toLoad.get(graph.getId()).getModel() != null) {
                UpsertDataModelContextBuilder builder =
                        UpsertDataModelContext.builder();

                renderModel(graph, builder);

                //rendering to upsert classifier and overs
                metaModelService.upsert(builder.build());
            }


            // must be uploaded after model, needs to be ref
            if (graph.isImportRoles() || graph.isImportUsers()) {
                securityIOService.importSecurityObjects(
                        toLoad.get(graph.getId()).getSecurity(), graph.isImportRoles(), graph.isImportUsers());
            }

        } catch (Exception e) {
            isExc = true;
            // NOTE(review): the original cause 'e' is not chained into the thrown
            // exception, so its stack trace is lost — confirm whether the
            // ModelImportExportException constructor can carry a cause.
            throw new ModelImportExportException("Unable to parse zip file.",
                    MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE,
                    graph.getFileName()
            );

        } finally {
            // end of maintenance mode and cleanup folder/draft
            finishApplyMetaGraph(graph, isExc);
        }


        auditResult(isExc);
        return graph;


//  todo      auditEventWriter.writeSuccessEvent(AuditActions.META_IMPORT, graph.getFileName());
//        executor.execute(() ->
//                userFavoriteEtalonsService.removeAbsentEntityFavorites()
//        );
    }

    /**
     * Finishes an apply run: refreshes state and removes the import working
     * directory. On success the preloaded model cache is cleared; on failure it
     * is kept (the finally block below checks the flag).
     *
     * @param graph the processed graph (file name used for error reporting)
     * @param isExc whether the apply itself already failed
     */
    private void finishApplyMetaGraph(MetaGraph graph, boolean isExc) {
        try {
            refresh(isExc);
            deleteDirectoryContent(IEConstants.IMPORT_PATH);

        } catch (IOException e) {
            // Flip the flag so the finally block keeps the cache for inspection.
            isExc = true;
            throw new SystemRuntimeException("Unable to parse zip file.", e,
                    MetaExceptionIds.EX_META_IMPORT_MODEL_UNABLE_TO_PARSE, graph.getFileName());
        } finally {

            // TODO maintenanceService.transferTo(new SystemMode().withModeEnum(ModeEnum.NORMAL));
            if (!isExc) {
                toLoad.clear();
            }
        }
    }

    /**
     * Writes a user event reporting import success or failure. Never throws:
     * reporting problems are only logged.
     */
    private void auditResult(boolean isExc) {
        try {
            String message = TextUtils.getText(isExc
                    ? DATA_IMPORT_METADATA_FAILED
                    : UserMessageConstants.DATA_IMPORT_METADATA_SUCCESS);

            userService.upsert(UpsertUserEventRequestContext.builder()
                    .login(SecurityUtils.getCurrentUserName())
                    .type("META_FULL_IMPORT")
                    .content(message)
                    .build());
        } catch (Exception e) {
            LOGGER.error("Cannot create report due to an exception", e);
        }
    }

    /**
     * Populates the upsert builder from the cached model, restricted to the
     * vertices selected in the graph. Entities are rendered first because the
     * relation selection depends on them.
     */
    private void renderModel(
            MetaGraph graph,
            UpsertDataModelContextBuilder builder
    ) throws DirectedAcyclicGraph.CycleFoundException {

        FullModelDTO cached = toLoad.get(graph.getId());

        ModelChangeContext.ModelChangeType changeType = graph.isOverride()
                ? ModelChangeContext.ModelChangeType.FULL
                : ModelChangeContext.ModelChangeType.MERGE;
        builder.upsertType(changeType);

        List<Entity> entities = entitiesToUpdate(graph, cached);
        builder.entitiesUpdate(entities);
        builder.lookupEntitiesUpdate(lookupsToUpdate(graph, cached));
        builder.nestedEntitiesUpdate(nestedToUpdate(graph, cached));
        builder.relationsUpdate(relationsToUpdate(graph, cached, entities));

        // TODO restore enumerations/source-systems rendering when supported again.

        EntitiesGroup groups = groupsToUpdate(graph, cached);
        if (groups != null) {
            builder.entitiesGroupsUpdate(groups);
        }
    }

    /**
     * Checks whether this node lacks the preloaded model for the graph. If so,
     * republishes the graph (exactly once) so the owning cluster member can
     * process it, and reports that processing must stop here.
     */
    private boolean mastBeRejected(MetaGraph graph) {
        if (toLoad.containsKey(graph.getId())) {
            return false;
        }
        // Model was preloaded on another member: forward once, never loop.
        if (!graph.isRedirected()) {
            graph.setRedirected(true);
            graph.setSecurityToken(SecurityUtils.getCurrentUserToken());
            topic.publish(graph);
        }
        return true;
    }

    /**
     * Renders and upserts the measurement values selected in the graph.
     */
    private void processMeasurementValues(MetaGraph graph) {
        MeasurementUnitsModel rendered =
                renderMeasurementValues(graph, toLoad.get(graph.getId()));
        upsertMeasurementValues(graph, rendered);
    }

    /**
     * Upserts the rendered measurement values.
     * <p>
     * Currently a no-op: the measurement-service integration is disabled. The
     * empty {@code if (graph.isOverride())} branch was removed as dead code.
     *
     * @param graph             import graph (override flag source)
     * @param measurementValues rendered values to store (currently ignored)
     */
    private void upsertMeasurementValues(MetaGraph graph, MeasurementUnitsModel measurementValues) {
        // TODO restore: in override mode batch-remove all existing measurement
        // values first, then convert and save each MeasurementCategory from
        // measurementValues via the measurement service.
    }


    /**
     * Narrows the cached measurement values down to the MEASURE vertices marked
     * for UPSERT in the graph. Mutates {@code toProcess} in place and returns
     * its (now filtered) measurement values.
     *
     * @param graph the graph
     * @param toProcess the cached model, may be {@code null}
     * @return the filtered measurement values, or {@code null} when nothing was cached
     */
    private MeasurementUnitsModel renderMeasurementValues(MetaGraph graph, FullModelDTO toProcess) {
        if (toProcess == null || toProcess.getMeasurementValues() == null) {
            return null;
        }

        Set<String> selectedMeasures = graph.vertexSet().stream()
                .filter(v -> v.getType() == MetaType.MEASURE)
                .filter(v -> v.getAction() == MetaAction.UPSERT)
                .map(MetaVertex::getId)
                .collect(Collectors.toSet());

        MeasurementUnitsModel values = toProcess.getMeasurementValues();
        List<MeasurementCategory> kept = values.getValues().stream()
                .filter(category -> selectedMeasures.contains(category.getName()))
                .collect(Collectors.toList());

        // Replace the category list in place: clear first, then re-add kept ones.
        values.getValues().clear();
        values.withValues(kept);
        return values;
    }

    /**
     * Source systems to update.
     * <p>
     * Currently disabled: always returns an empty list until source-system
     * import is restored (the original implementation filtered the cached
     * model's source systems by the graph's UPSERT SOURCE_SYSTEM vertices).
     *
     * @param graph the graph
     * @param toProcess the to process
     * @return always an empty list for now
     */
    private List<SourceSystem> sourceSystemsToUpdate(MetaGraph graph, FullModelDTO toProcess) {
        // NOTE(review): the removed guard checked getEntities() == null — a
        // copy-paste slip from another method; it was irrelevant while the body
        // is disabled and could NPE on a null model.
        return Collections.emptyList();
    }

    /**
     * Enumerations to update.
     * <p>
     * Currently disabled: always returns an empty set until enumeration import
     * is restored (the original implementation collected the cached model's
     * enumerations matching the graph's UPSERT ENUM vertices).
     *
     * @param graph the graph
     * @param toProcess the to process
     * @return always an empty set for now
     */
    private Set<EnumerationType> enumerationsToUpdate(MetaGraph graph, FullModelDTO toProcess) {
        // TODO restore the UPSERT/ENUM vertex filtering once enumerations are
        // supported by the upsert context again.
        return Collections.emptySet();
    }

    /**
     * Collects the relations that must be written as part of this model update.
     * <p>
     * Incoming relations are those from {@code toProcess} whose name matches an
     * UPSERT/RELATION vertex of the graph. In override mode only those are
     * returned. Otherwise they are merged with the relations already registered
     * in the meta model whose both end entities belong to {@code entitiesToUpdate};
     * an incoming relation wins over an existing one with the same name (keeping
     * that name).
     *
     * @param graph the meta graph describing pending model changes
     * @param toProcess the model being processed
     * @param entitiesToUpdate entities selected for update; may be null
     * @return the relations to persist
     */
    private List<Relation> relationsToUpdate(MetaGraph graph, FullModelDTO toProcess, List<Entity> entitiesToUpdate) {

        Map<String, Relation> incoming = new HashMap<>();

        if (toProcess.getModel().getRelations() != null) {

            // Names of relations scheduled for upsert.
            Set<String> scheduled = graph.vertexSet().stream()
                    .filter(v -> v.getAction() == MetaAction.UPSERT)
                    .filter(v -> v.getType() == MetaType.RELATION)
                    .map(MetaVertex::getId)
                    .collect(Collectors.toSet());

            for (Relation candidate : toProcess.getModel().getRelations()) {
                if (scheduled.contains(candidate.getName())) {
                    incoming.put(candidate.getName(), candidate);
                }
            }

            // Full model override: existing relations are irrelevant.
            if (graph.isOverride()) {
                return new ArrayList<>(incoming.values());
            }
        }

        List<String> updatedEntityNames = entitiesToUpdate == null
                ? Collections.emptyList()
                : entitiesToUpdate.stream().map(Entity::getName).collect(Collectors.toList());

        // Relations already registered whose both ends belong to updated entities.
        Map<String, Relation> existing = metaModelService.instance(Descriptors.DATA).getRelations()
                .stream()
                .filter(r -> updatedEntityNames.contains(r.getLeft().getName()))
                .filter(r -> updatedEntityNames.contains(r.getRight().getName()))
                .map(r -> ((RelationImpl) r).getSource())
                .collect(Collectors.toMap(Relation::getName, Functions.identity()));

        // Merge: on a name clash the incoming relation replaces the existing one,
        // carrying over the existing name (names are equal here by construction).
        Map<String, Relation> merged = new HashMap<>(existing);
        for (Map.Entry<String, Relation> entry : incoming.entrySet()) {
            merged.merge(entry.getKey(), entry.getValue(), (oldRel, newRel) -> newRel.withName(oldRel.getName()));
        }

        return new ArrayList<>(merged.values());
    }

    /**
     * Collects nested entities that need to be updated according to the change graph.
     * <p>
     * Only nested entities actually referenced by the model's entities are
     * considered, to avoid adding or updating unreferenced nested entities.
     *
     * @param graph the meta graph describing pending model changes
     * @param toProcess the model being processed
     * @return the nested entities to persist; empty when the model has no entities
     */
    private List<NestedEntity> nestedToUpdate(MetaGraph graph, FullModelDTO toProcess) {

        if (toProcess.getModel().getEntities() == null) {
            return Collections.emptyList();
        }

        // Names of nested entities scheduled for upsert.
        Set<String> scheduled = graph.vertexSet().stream()
                .filter(v -> v.getAction() == MetaAction.UPSERT)
                .filter(v -> v.getType() == MetaType.NESTED_ENTITY)
                .map(MetaVertex::getId)
                .collect(Collectors.toSet());

        // Restrict to nested entities referenced by the model's entities,
        // then keep only those the graph scheduled for update.
        return ModelUtils
                .filterUsageNestedEntities(
                        toProcess.getModel().getNestedEntities(),
                        toProcess.getModel().getEntities()
                )
                .stream()
                .filter(ne -> scheduled.contains(ne.getName()))
                .collect(Collectors.toList());
    }

    /**
     * Collects entities that need to be updated according to the change graph.
     * <p>
     * An entity qualifies when the graph holds an UPSERT vertex of type ENTITY
     * or LOOKUP with the entity's name as its id.
     *
     * @param graph the meta graph describing pending model changes
     * @param fullModel the full model being processed
     * @return the entities to persist; empty when the model has no entities
     */
    private List<Entity> entitiesToUpdate(MetaGraph graph, FullModelDTO fullModel) {

        if (fullModel.getModel().getEntities() == null) {
            return Collections.emptyList();
        }

        // Ids scheduled for upsert as ENTITY or LOOKUP.
        Set<String> scheduled = graph.vertexSet().stream()
                .filter(v -> v.getAction() == MetaAction.UPSERT)
                .filter(v -> v.getType() == MetaType.LOOKUP || v.getType() == MetaType.ENTITY)
                .map(MetaVertex::getId)
                .collect(Collectors.toSet());

        List<Entity> result = new ArrayList<>();
        for (Entity entity : fullModel.getModel().getEntities()) {
            if (scheduled.contains(entity.getName())) {
                result.add(entity);
            }
        }

        return result;
    }

    /**
     * Collects lookup entities that need to be updated according to the change graph.
     * <p>
     * A lookup qualifies when the graph holds an UPSERT vertex of type ENTITY
     * or LOOKUP with the lookup's name as its id.
     *
     * @param graph the meta graph describing pending model changes
     * @param fullModel the full model being processed
     * @return the lookups to persist; empty when the model has no lookup entities
     */
    private List<LookupEntity> lookupsToUpdate(MetaGraph graph, FullModelDTO fullModel) {

        if (fullModel.getModel().getLookupEntities() == null) {
            return Collections.emptyList();
        }

        // Ids scheduled for upsert as ENTITY or LOOKUP.
        Set<String> scheduled = graph.vertexSet().stream()
                .filter(v -> v.getAction() == MetaAction.UPSERT)
                .filter(v -> v.getType() == MetaType.LOOKUP || v.getType() == MetaType.ENTITY)
                .map(MetaVertex::getId)
                .collect(Collectors.toSet());

        List<LookupEntity> result = new ArrayList<>();
        for (LookupEntity lookup : fullModel.getModel().getLookupEntities()) {
            if (scheduled.contains(lookup.getName())) {
                result.add(lookup);
            }
        }

        return result;
    }

    /**
     * Re-runs the meta model startup hook inside a transaction.
     *
     * @param isExc when true, run in a fresh transaction (PROPAGATION_REQUIRES_NEW);
     *              otherwise join the current one (PROPAGATION_REQUIRED)
     */
    public void refresh(boolean isExc) {

        int propagation = isExc
                ? TransactionDefinition.PROPAGATION_REQUIRES_NEW
                : TransactionDefinition.PROPAGATION_REQUIRED;

        TransactionTemplate template = new TransactionTemplate(txManager);
        template.setPropagationBehavior(propagation);
        template.execute(status -> {
            // registrationService.cleanup(); // TODO: @Modules
            // measurementService.afterModuleStartup();
            metaModelService.afterModuleStartup();
            return null;
        });
    }

    /**
     * Computes the entities-group tree to persist by merging the incoming group
     * tree into the currently registered one.
     * <p>
     * In override mode the incoming tree replaces the old one wholesale.
     * Otherwise both trees are converted to DAGs and walked in topological
     * order: for each incoming group vertex scheduled for upsert, either its
     * display name is copied onto the matching existing vertex, or (if absent)
     * the subtree is grafted onto the nearest existing ancestor. The OLD tree
     * object is mutated in place and returned.
     *
     * @param graph the meta graph describing pending model changes
     * @param toProcess the model being processed
     * @return the merged group tree (the mutated old root, or the new root in override mode)
     * @throws DirectedAcyclicGraph.CycleFoundException if either group tree contains a cycle
     */
    private EntitiesGroup groupsToUpdate(MetaGraph graph, FullModelDTO toProcess)
            throws DirectedAcyclicGraph.CycleFoundException {

        if (graph.isOverride()) {
            return toProcess.getModel().getEntitiesGroup();
        }

        // Ids of group vertices scheduled for upsert.
        Set<String> gs = graph.vertexSet().stream()
                .filter(v -> v.getAction() == MetaAction.UPSERT && v.getType() == MetaType.GROUPS)
                .map(MetaVertex::getId)
                .collect(Collectors.toSet());

        // DAG of the incoming group tree.
        EntitiesGroup newGroup = toProcess.getModel().getEntitiesGroup();
        GroupGraph newGroupGraph = new GroupGraph();
        fillGroupGraph(StringUtils.EMPTY, newGroup, newGroup.getInnerGroups(), newGroupGraph);

        // DAG of the currently registered group tree.
        EntitiesGroup oldGroup = ((EntitiesGroupImpl) metaModelService.instance(Descriptors.DATA).getRootGroup()).getSource();
        GroupGraph oldGroupGraph = new GroupGraph();
        fillGroupGraph(StringUtils.EMPTY, oldGroup, oldGroup.getInnerGroups(), oldGroupGraph);

        // intersect two graphs
        // Topological order guarantees a parent group is visited before its children,
        // so grafting (intersectSubTree) can rely on ancestors being processed first.
        TopologicalOrderIterator<GroupVertex, GroupEdge> topologicalOrderIterator =
                new TopologicalOrderIterator<>(
                        newGroupGraph
                );

        while (topologicalOrderIterator.hasNext()) {

            GroupVertex newVertex = topologicalOrderIterator.next();
            if (!gs.contains(newVertex.getId())) {
                continue;
            }
            if (oldGroupGraph.containsVertex(newVertex)) {
                // Group already exists: only refresh its display name.
                // NOTE(review): containsVertex presumably matches by id — confirm
                // GroupVertex#equals before relying on this.
                GroupVertex oldVertex = oldGroupGraph.getVertexById(newVertex.getId());
                oldVertex.getValue().setDisplayName(newVertex.getValue().getDisplayName());
            } else {
                // Group is new: attach it (and implicitly its subtree) under the
                // closest ancestor present in the old tree.
                intersectSubTree(newVertex, newGroupGraph, oldGroupGraph);
            }
        }

        return oldGroup;
    }

    /**
     * Recursively populates a {@link GroupGraph} DAG from an entities-group tree.
     * <p>
     * Each vertex id is the dot-separated path of group names from the root
     * (e.g. {@code root.sub.leaf}). Leaf groups are added as vertices by their
     * parent's iteration; a group with no inner groups contributes no edges.
     * <p>
     * BUG FIX (review): the inner vertex was previously constructed as
     * {@code new GroupVertex(top, realPath)} — identical to the parent vertex —
     * which made {@code addDagEdge(topV, innV)} a self-edge on the DAG instead
     * of a parent→child edge. It now wraps the inner group with its own path,
     * matching the id the recursive call computes for it.
     *
     * @param path dot-separated path of {@code top}'s parent; empty for the root
     * @param top the group whose children are being linked
     * @param inner {@code top}'s inner groups; nothing is added when empty
     * @param groupGraph the DAG being populated (mutated in place)
     * @throws DirectedAcyclicGraph.CycleFoundException if an edge would close a cycle
     */
    private void fillGroupGraph(String path, EntitiesGroup top, List<EntitiesGroup> inner, GroupGraph groupGraph)
            throws DirectedAcyclicGraph.CycleFoundException {

        if (CollectionUtils.isEmpty(inner)) {
            return;
        }

        String realPath = StringUtils.isEmpty(path)
                ? top.getName()
                : String.join(".", path, top.getName());

        GroupVertex topV = new GroupVertex(top, realPath);
        groupGraph.addVertex(topV);

        for (EntitiesGroup inn : inner) {
            // Child vertex carries the child's own group and path (parent path + its name).
            GroupVertex innV = new GroupVertex(inn, String.join(".", realPath, inn.getName()));
            groupGraph.addVertex(innV);
            groupGraph.addDagEdge(topV, innV);
            fillGroupGraph(realPath, inn, inn.getInnerGroups(), groupGraph);
        }
    }

    /**
     * Grafts a new group vertex into the old group tree under its nearest
     * existing ancestor.
     * <p>
     * The parent id is derived by stripping the last dot-separated segment of
     * {@code toAdd}'s id. If that parent exists in {@code to}, the new group is
     * appended to the parent's inner groups (the subtree below {@code toAdd}
     * comes along because the group object still references its children).
     * Otherwise the method recurses upward on the parent vertex taken from
     * {@code from}.
     * <p>
     * NOTE(review): termination relies on some ancestor always being present in
     * {@code to}. If the root itself is missing, {@code substringBeforeLast}
     * returns the unchanged id for a dot-less string and
     * {@code from.getVertexById} may return null — risking an NPE or unbounded
     * recursion. Also, when recursing, {@code toAdd} itself is attached only
     * transitively via the ancestor's inner groups — confirm that is intended.
     *
     * @param toAdd the new group vertex to graft
     * @param from the graph of the incoming group tree
     * @param to the graph of the existing group tree (its group values are mutated)
     */
    private void intersectSubTree(GroupVertex toAdd, GroupGraph from, GroupGraph to) {
        String idToEnrich = StringUtils.substringBeforeLast(toAdd.getId(), ".");
        GroupVertex oldVertex = to.getVertexById(idToEnrich);
        if (oldVertex != null) {
            oldVertex.getValue().getInnerGroups().add(toAdd.getValue());
        } else {
            intersectSubTree(from.getVertexById(idToEnrich), from, to);
        }
    }


}
