package software.lib.service;

import lsh.LshPackage.Pair;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.neo4j.driver.v1.Record;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import software.lib.dao.RecordDao;
import software.lib.entity.Gig;
import software.lib.entity.RecordGig;
import software.lib.entity.graph.Node;
import software.lib.util.NeoDriver;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Approximate graph-matching search service ("NeMa"-style, per {@link #doNema}):
 * matches a user query graph against a Neo4j target graph of gigs, sellers and
 * their attributes, then ranks the matched gigs by matching cost and by score.
 *
 * Created by Wang on 2017/5/16.
 */
@Service
@Transactional
public class NemaService {
    // Parameter α — uniform edge weight used when expanding adjacency lists into distance maps.
    public static final double alpha = 0.5;
    // Minimum node similarity for a target node to enter a query node's candidate set.
    public static final double THRESHHOLD = 0.5;
    // Number of cost-propagation iterations in doNema.
    public static final int LOOP = 3;
    // Sentinel meaning "no cost found yet" when minimising.
    public static final double MAXNUM = Double.MAX_VALUE;
    // Minimum LSH similarity for a candidate gig to be considered at all.
    public static final double SIMILARITY_MIN = 0.3;
    // Weight of label similarity vs. neighbourhood cost in the initial cost U0.
    public static final double lambda = 0.7;

    /**
     * Runs the approximate graph-matching search.
     *
     * Builds the target graph around the LSH candidate gigs and the query graph from the
     * user input, iteratively propagates matching costs (U0..U[LOOP]), maps the final
     * matches back to gig nodes, and returns the gigs ranked two ways.
     *
     * @param input           user search form, e.g. {"gig": ..., "score":"128", "tags":"a,b"}
     * @param candidateGigIds candidate gigs with LSH similarities
     * @return pair of (gigs ordered by matching cost, gigs ordered by score)
     */
    public ImmutablePair<List<Node>, List<Node>> doNema(Map<String, String> input, Pair[] candidateGigIds){
        org.apache.commons.lang3.tuple.Pair<Map<Integer,Node>, Map<Integer,Set<Integer>>> target = createTargetGraph(candidateGigIds);
        System.out.println("-------------- Create Target Graph OK! -----------------");

        org.apache.commons.lang3.tuple.Pair<Map<Integer,Node>, Map<Integer,Set<Integer>>> query = createQueryGraph(input);
        System.out.println("-------------- Create Query Graph OK! -----------------");

        Map<Integer,Node> nodeMapOfTarget = target.getLeft();
        Map<Integer,Set<Integer>> neighborVecOfTarget = target.getRight();
        Map<Integer,Map<Integer,Double>> neighborVecOfTargetWithDistance = withUniformDistance(neighborVecOfTarget);

        Map<Integer,Node> nodeMapOfQuery = query.getLeft();
        Map<Integer,Set<Integer>> neighborVecOfQuery = query.getRight();
        // BUG FIX: this map used to be built from neighborVecOfTarget (copy-paste), so every
        // distanceQ lookup keyed by query-node ids returned 0 and the cost normalisation broke.
        Map<Integer,Map<Integer,Double>> neighborVecOfQueryWithDistance = withUniformDistance(neighborVecOfQuery);

        // M: candidate sets — key: query node id, value: target node ids passing THRESHHOLD.
        Map<Integer,Set<Integer>> M = new HashMap<>();
        // Gig label similarities computed by LSH (filled from candidateGigIds below).
        Map<Integer, Double> gigSim = new HashMap<>();

        // BUG FIX: built sequentially — the original mutated the non-thread-safe HashMap M
        // from a parallel stream.
        nodeMapOfQuery.forEach((id, qnode) -> {
            if (qnode.getType().equals("gig")) {
                // Gig candidates come from the LSH result, added below.
                M.put(id, new TreeSet<>());
            } else {
                M.put(id, nodeMapOfTarget.values().stream()
                        .filter(tnode -> delta(qnode, tnode, gigSim) >= THRESHHOLD)
                        .map(tnode -> tnode.id)
                        .collect(Collectors.toSet()));
            }
        });

        Arrays.stream(candidateGigIds)
                .filter(pair -> pair.similarity >= SIMILARITY_MIN)
                .forEach(pair -> {
                    gigSim.put(pair.nid, pair.similarity);
                    // Query-graph node 1 is always the gig node.
                    M.get(1).add(pair.nid);
                });

        System.out.println("-------------- Begin U0 -----------------");

        // U0: initial inference cost per (query node, candidate target node).
        Map<Integer, Map<Integer, Double>> U0 = new HashMap<>();
        nodeMapOfQuery.forEach((qid, qnode) -> {
            Map<Integer, Double> v_cost = new TreeMap<>();
            for (Integer tid : M.get(qid)) { // candidate set
                double[] sum = new double[2];
                // Walk the query node's neighbours.
                neighborVecOfQuery.get(qid).stream().filter(Objects::nonNull).forEach(qNeighborId -> {
                    OptionalDouble maxValue = M.get(qNeighborId).stream()
                            .mapToDouble(tMappedId -> distanceT(tid, tMappedId, neighborVecOfTargetWithDistance))
                            .max();
                    sum[0] += delta_plus(distanceQ(qid, qNeighborId, neighborVecOfQueryWithDistance), maxValue.orElse(0.0));
                    sum[1] += distanceQ(qid, qNeighborId, neighborVecOfQueryWithDistance);
                });
                // NOTE(review): sum[1] can be 0 for an isolated query node, yielding NaN —
                // behaviour preserved from the original.
                double fi = lambda * (1 - delta(qnode, nodeMapOfTarget.get(tid), gigSim)) + (1 - lambda) * sum[0] / sum[1];
                v_cost.put(tid, fi);
            }
            U0.put(qid, v_cost);
        });

        System.out.println("-------------- End U0 -----------------");

        @SuppressWarnings("unchecked")
        Map<Integer, Map<Integer, Double>>[] U = new Map[LOOP + 1];
        // OPT[loop]: matching result of that iteration — key: query node id, value: target node id.
        @SuppressWarnings("unchecked")
        Map<Integer, Integer>[] OPT = new Map[LOOP + 1];

        U[0] = U0;
        for (int loop = 1; loop <= LOOP; loop++) {
            U[loop] = new HashMap<>();
            OPT[loop] = new HashMap<>();
            // Plain loops replace the original loop_pointer[] workaround and the unsafe
            // parallel writes into U/OPT.
            for (Map.Entry<Integer, Node> entry : nodeMapOfQuery.entrySet()) {
                Integer qid = entry.getKey();
                double optCost = MAXNUM;
                Map<Integer, Double> v_cost = new HashMap<>();
                for (Integer tid : M.get(qid)) { // candidate set
                    double w = calc_W(U[loop - 1], qid, tid, neighborVecOfTargetWithDistance, neighborVecOfQueryWithDistance, M);
                    double cost = w + 1 - delta(nodeMapOfQuery.get(qid), nodeMapOfTarget.get(tid), gigSim);
                    v_cost.put(tid, cost);
                    if (optCost > cost) {
                        optCost = cost;
                        OPT[loop].put(qid, tid);
                    }
                }
                U[loop].put(qid, v_cost);
            }
        }

        System.out.println("-------------- End UN -----------------");

        try (NeoDriver neodriver = new NeoDriver()) {
            // Map every matched target node back to the gig(s) it belongs to.
            Set<Node> gigs = OPT[LOOP].entrySet().stream()
                    .filter(entry -> {
                        String type = nodeMapOfTarget.get(entry.getValue()).type;
                        return !type.equals("Language") && !type.equals("Tag");
                    })
                    .flatMap(entry -> {
                        Integer tid = entry.getValue();
                        String type = nodeMapOfTarget.get(tid).type;
                        switch (type) {
                            case "Language":
                            case "Tag":
                                return Stream.empty();
                            case "gig":
                                return neodriver.query("start n=node(" + tid + ")  MATCH (n) RETURN id(n) As id").list().stream();
                            case "seller":
                                return neodriver.query("start n=node(" + tid + ")  MATCH (n)<-[:by]-(x:gig) RETURN id(x) As id").list().stream();
                            case "score":
                            case "timespan":
                            case "duration":
                            case "price":
                                return neodriver.query("start n=node(" + tid + ")  MATCH (n)-[:of]->(x:gig) RETURN id(x) As id").list().stream();
                            case "sale_effiency":
                            default:
                                return neodriver.query("start n=node(" + tid + ") MATCH (n)-[:of]->(s:seller)<-[:by]-(x:gig) RETURN id(x) As id").list().stream();
                        }
                    })
                    .map(record -> record.get("id").asInt())
                    .map(nodeMapOfTarget::get)
                    .collect(Collectors.toSet());

            System.out.println("-------------- Generate Result Gig Set OK! -----------------");

            // Rank by matching cost. Sequential loop: the original parallel stream mutated the
            // non-thread-safe TreeMaps costMap/costMapOfTarget concurrently.
            Map<Node, Double> costMap = new TreeMap<>();
            for (Node node : gigs) {
                // costMapOfTarget: attribute type -> matched target node in this gig's neighbourhood.
                Map<String, Node> costMapOfTarget = new TreeMap<>();
                costMapOfTarget.put("gig", node);
                neighborVecOfTarget.get(node.id).stream()
                        .map(nodeMapOfTarget::get)
                        .peek(neighbor -> costMapOfTarget.put(neighbor.type, neighbor))
                        .filter(neighbor -> neighbor.type.equals("seller"))
                        .flatMap(seller -> neighborVecOfTarget.get(seller.id).stream())
                        .map(nodeMapOfTarget::get)
                        .filter(attribute -> !attribute.type.equals("gig"))
                        .forEach(attribute -> costMapOfTarget.put(attribute.type, attribute));

                double[] sums = new double[3];
                for (Node queryNode : nodeMapOfQuery.values()) {
                    Map<Integer, Double> queryNeighborMap = neighborVecOfQueryWithDistance.get(queryNode.id);
                    if (queryNeighborMap != null && !queryNeighborMap.isEmpty()) {
                        for (Map.Entry<Integer, Double> qNeighborEntry : queryNeighborMap.entrySet()) {
                            Node qNeighborNode = nodeMapOfQuery.get(qNeighborEntry.getKey());
                            // NOTE(review): costMapOfTarget.get(...) may be null when the matched gig
                            // lacks this attribute type (NPE risk preserved from the original) — confirm.
                            sums[0] += delta_plus(qNeighborEntry.getValue(),
                                    distanceT(costMapOfTarget.get(qNeighborNode.type).id, qNeighborNode.id, neighborVecOfTargetWithDistance));
                            sums[1] += qNeighborEntry.getValue();
                        }
                    }
                    // NOTE(review): sums[0]/sums[1] accumulate ACROSS query nodes (never reset per
                    // node), matching the original implementation.
                    sums[2] += 0.3 * (1 - delta(queryNode, costMapOfTarget.get(queryNode.type), gigSim)) + 0.7 * sums[0] / sums[1];
                }
                costMap.put(node, sums[2]);
            }

            System.out.println("-------------- Sorted By Cost OK! -----------------");

            List<Node> gigsByCostDesc = costMap.entrySet().stream()
                    .sorted(Comparator.comparing(Map.Entry::getValue))
                    .map(Map.Entry::getKey)
                    .collect(Collectors.toList());

            // Rank by score.
            List<Node> gigsByScoreDesc = sortByScore(gigs);

            System.out.println("-------------- Sorted By Score OK! -----------------");

            return new org.apache.commons.lang3.tuple.ImmutablePair<>(gigsByCostDesc, gigsByScoreDesc);
        }
    }

    /**
     * Expands an adjacency list into an adjacency map in which every edge carries the
     * uniform weight {@code alpha} (the α parameter of the matching model).
     */
    private static Map<Integer, Map<Integer, Double>> withUniformDistance(Map<Integer, Set<Integer>> neighborVec) {
        return neighborVec.entrySet().stream()
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        entry -> entry.getValue().stream().collect(Collectors.toMap(id -> id, id -> alpha))));
    }

    /**
     * Sorts gigs in descending order of the score stored in the relational DB.
     *
     * The score of each gig is fetched exactly once and cached; the original version ran a
     * Neo4j lookup plus a SQL query inside the comparator, i.e. on every comparison during
     * the sort. Statements/result sets are now closed via try-with-resources.
     *
     * @param gigs gigs to rank
     * @return gigs ordered by score descending, or {@code null} if the DB connection fails
     *         (original contract preserved)
     */
    public List<Node> sortByScore(Set<Node> gigs){
        Map<Node, Long> scores = new HashMap<>();
        try (Connection conn = dataSource.getConnection();
             NeoDriver neodriver = new NeoDriver();
             PreparedStatement preparedStatement = conn.prepareStatement("SELECT score FROM gig where gig_id=?")
        ){
            for (Node node : gigs) {
                long score = 0L; // gigs whose score cannot be resolved sort last
                try {
                    // The graph node stores the relational gig_id in its "id" property.
                    String gigIdStr = neodriver.query("start n=node(" + node.id + ")  MATCH (n) RETURN n.id As id").single().get("id").asString();
                    preparedStatement.setLong(1, Long.parseLong(gigIdStr));
                    try (ResultSet rs = preparedStatement.executeQuery()) {
                        if (rs.next()) {
                            score = rs.getLong(1);
                        }
                    }
                } catch (SQLException | NumberFormatException e) {
                    e.printStackTrace();
                }
                scores.put(node, score);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            return null;
        }
        return gigs.stream()
                .sorted(Comparator.comparing((Node node) -> scores.get(node)).reversed())
                .collect(Collectors.toList());
    }

    /**
     * Builds the target sub-graph (adjacency structures) around the LSH candidate gigs:
     * candidate gigs -> their sellers -> seller attributes and gig attributes.
     *
     * @param candidateGigIds candidate gig list produced by LSH
     * @return Target(V,E) as (node-id -> Node, node-id -> neighbour-id set)
     */
    private static org.apache.commons.lang3.tuple.Pair<Map<Integer,Node>, Map<Integer,Set<Integer>>> createTargetGraph(Pair[] candidateGigIds){
        try (NeoDriver neodriver = new NeoDriver()){
            //TODO adjacency list only — edge labels are dropped here; should be optimised later
            Map<Integer,Set<Integer>> neighborVec = new TreeMap<>();
            // Records each (nid, id) result row as an UNDIRECTED edge in neighborVec.
            Consumer<Record> recordConsumer = record -> {
                //TODO queries treat the graph as undirected although neo4j stores directed edges
                int nid = record.get("nid").asInt();
                int id = record.get("id").asInt();

                if (!neighborVec.containsKey(nid)) {
                    neighborVec.put(nid, new TreeSet<>());
                }
                neighborVec.get(nid).add(id);

                if (!neighborVec.containsKey(id)) {
                    neighborVec.put(id, new TreeSet<>());
                }
                neighborVec.get(id).add(nid);
            };


            // All sellers reachable from sufficiently-similar candidate gigs.
            // NOTE(review): this parallel stream issues queries through one shared NeoDriver —
            // confirm the driver/session is safe for concurrent use.
            Set<Node> sellers = Arrays.stream(candidateGigIds)
                    .parallel()
                    .filter(pair -> pair.similarity >= SIMILARITY_MIN)
                    .map(pair -> neodriver.query( "start n=node("+pair.nid+") MATCH (n)-[]-(x:seller) RETURN ID(x) As id,x.name As name, Labels(x)[0] As type"))
                    .flatMap(r->r.list().stream())
                    .filter(record -> record.containsKey("type"))
                    .filter(record -> record.get("type").asString().equals("seller"))
                    .map(record -> new Node(record.get("id").asInt(),"seller",record.get("name").asString()))
                    .collect(Collectors.toSet());

            // All gigs of those sellers — a wider set than the LSH candidate list.
            // (Filled as a side effect of the seller-attribute query below.)
            Set<Node> gigs = new TreeSet<>();
            // Seller attribute nodes; the peek() calls record edges and collect gig nodes
            // as deliberate side effects of this (sequential) stream.
            Set<Node> sellerAttributes = sellers.stream()
                    .flatMap(seller->neodriver.query( "start n=node("+seller.id+") MATCH (n)-[]-(x) RETURN ID(x) As id, Labels(x)[0] As type, x.name As name, ID(n) As nid ").list().stream())
                    .filter(record -> record.containsKey("type"))
                    .peek(recordConsumer)
                    .peek(record -> {
                        if(record.get("type").asString().equals("gig")){
                            gigs.add(new Node(record.get("id").asInt(),"gig",record.get("name").asString()));
                        }
                    })
                    .filter(record -> !record.get("type").asString().equals("gig"))
                    .map(record -> new Node(record.get("id").asInt(),record.get("type").asString(),record.get("name").asString()))
                    .collect(Collectors.toSet());

            // Gig attribute nodes (everything adjacent to a gig except its seller).
            Set<Node> gigAttributes = gigs.stream()
                    .flatMap(gig->neodriver.query( "start n=node("+gig.id+") MATCH (n)-[]-(x) RETURN ID(x) As id, Labels(x)[0] As type, x.name As name,ID(n) as nid").list().stream())
                    .filter(record -> record.containsKey("type"))
                    .peek(recordConsumer)
                    .filter(record -> !record.get("type").asString().equals("seller"))
                    .map(record -> new Node(record.get("id").asInt(),record.get("type").asString(),record.get("name").asString()))
                    .collect(Collectors.toSet());

            //TODO keeps the original algorithm: node types are ignored when merging all node
            // sets into a single id -> Node map (duplicate ids keep the first occurrence).
            Map<Integer,Node> nodeMap =
                    Stream.of(sellers,gigs,sellerAttributes,gigAttributes).flatMap(x->x.stream())
                    .collect(Collectors.toMap(node->node.id,node->node,(node1, node2) -> node1));

            return new org.apache.commons.lang3.tuple.ImmutablePair<Map<Integer,Node>, Map<Integer,Set<Integer>>>(nodeMap,neighborVec);
        }
    }

    /**
     * Builds the query graph from the user's search form.
     *
     * Node 1 is always the gig, node 2 the seller; attribute nodes follow, then one "Tag"
     * node per comma-separated tag. Gig-level attributes are linked to node 1, seller-level
     * attributes to node 2. The sentinel value "-1" means "not specified".
     *
     * @param input user input, e.g. {"score":"128","country":"America"}
     * @return Query(V,E) as (node-id -> Node, node-id -> neighbour-id set)
     */
    private static org.apache.commons.lang3.tuple.Pair<Map<Integer,Node>, Map<Integer,Set<Integer>>> createQueryGraph(Map<String, String> input){
        // (The original opened an unused NeoDriver here; removed — this method never queries Neo4j.)
        //TODO adjacency list only — edge labels are dropped here; should be optimised later
        Map<Integer,Set<Integer>> neighborVec = new TreeMap<>();

        Map<Integer,Node> nodeMap = new TreeMap<>();
        int index = 0;
        String[] attrib = {"score", "timespan", "duration", "price", "sale_effiency", "country", "language","Language", "education", "certification", "level", "avg_resp_time"};
        nodeMap.put((++index), new Node(index, "gig", input.get("gig")));
        nodeMap.put((++index), new Node(index, "seller", " "));
        for (String attr : attrib) {
            String value = input.get(attr);
            if (value != null && !value.equals("-1")) {
                nodeMap.put((++index), new Node(index, attr, value));
            }
        }

        int[] indexPoint = { index };

        String tags = input.get("tags");
        // BUG FIX: guard against a missing "tags" entry — used to NPE on tags.equals("-1").
        if (tags != null && !tags.equals("-1")) {
            Arrays.stream(tags.split(","))
                    .filter(StringUtils::isNotBlank)
                    .forEach(s -> nodeMap.put((++indexPoint[0]), new Node(indexPoint[0], "Tag", s)));
        }

        // gig <-> seller edge.
        neighborVec.put(1, new TreeSet<>());
        neighborVec.put(2, new TreeSet<>());
        neighborVec.get(1).add(2);
        neighborVec.get(2).add(1);

        // BUG FIX: runs sequentially — the original mutated the non-thread-safe TreeMap/TreeSet
        // from a parallel stream. Also uses short-circuit '||' instead of bitwise '|'.
        nodeMap.entrySet().stream().skip(2).forEach(entry -> {
            String type = entry.getValue().getType();
            if (type.equals("score") || type.equals("timespan") || type.equals("duration")
                    || type.equals("price") || type.equals("Tag") || type.equals("seller")) {
                // connect to gig (node 1)
                neighborVec.put(entry.getKey(), Stream.of(1).collect(Collectors.toSet()));
                neighborVec.get(1).add(entry.getKey());
            }
            if (type.equals("sale_effiency") || type.equals("education") || type.equals("certification")
                    || type.equals("level") || type.equals("country") || type.equals("language")
                    || type.equals("Language") || type.equals("avg_resp_time")) {
                // connect to seller (node 2)
                neighborVec.put(entry.getKey(), Stream.of(2).collect(Collectors.toSet()));
                neighborVec.get(2).add(entry.getKey());
            }
        });

        return new org.apache.commons.lang3.tuple.ImmutablePair<Map<Integer,Node>, Map<Integer,Set<Integer>>>(nodeMap, neighborVec);
    }

    /**
     * Node similarity between a query node and a target node.
     *
     * Returns 0 when the target is null or the types differ. Numeric attributes score
     * THRESHHOLD plus a small distance penalty, capped at TOP_SIMILIRITY; textual attributes
     * score THRESHHOLD on a substring match. Gig similarity comes from the LSH map.
     * NOTE: numeric branches assume parseable labels (NumberFormatException otherwise), and
     * "price" assumes a "low-high" label.
     *
     * @param queryNode  node from the query graph
     * @param targetNode node from the target graph (may be null)
     * @param gigSim     LSH similarity per target gig id
     * @return similarity in [0, TOP_SIMILIRITY] (1 for matching sellers)
     */
    public static double delta(Node queryNode, Node targetNode,Map<Integer, Double> gigSim){//节点相似度
        if (targetNode == null || !queryNode.type.equals(targetNode.type)) {
            return 0;
        }
        String qv = queryNode.label;
        String tv = targetNode.label;
        switch (queryNode.type) {
            case "seller": // any seller matches any seller
                return 1;
            case "gig": {
                Double sim = gigSim.get(targetNode.id);
                return sim == null ? 0 : sim;
            }
            case "score":      // score: 1-10
            case "timespan":   // account age in years
                return capSimilarity(THRESHHOLD + 0.001 * Math.abs(Double.parseDouble(tv) - Double.parseDouble(qv)));
            case "duration":       // delivery time in days
            case "sale_effiency":  // sales efficiency
            case "avg_resp_time":  // average response time
                return capSimilarity(THRESHHOLD + 0.01 * Math.abs(Double.parseDouble(qv) - Double.parseDouble(tv)));
            case "price": // target label is "low-high"; compare against the high bound
                return capSimilarity(THRESHHOLD + 0.01 * Math.abs(Double.parseDouble(qv) - Double.parseDouble(tv.split("-")[1])));
            case "country":
            case "language":
            case "Language":
            case "Tag": // keyword
                return tv.contains(qv) ? THRESHHOLD : 0;
            case "education":
            case "certification": // any non-empty value counts
                return !tv.equals("") ? THRESHHOLD : 0;
            case "level":
                return Integer.parseInt(tv) > 1 ? THRESHHOLD : 0;
            case "tag": { // multi-keyword: ';'-separated query keywords
                int cnt = -1;
                for (String key : qv.split(";")) {
                    if (tv.contains(key)) {
                        cnt++;
                    }
                }
                return capSimilarity(THRESHHOLD + cnt * 0.07);
            }
            default:
                return 0;
        }
    }

    /** Caps a raw similarity at TOP_SIMILIRITY (mirrors the original ternary exactly). */
    private static double capSimilarity(double sim) {
        return sim < TOP_SIMILIRITY ? sim : TOP_SIMILIRITY;
    }

    /**
     * Edge weight (relatedness) between two target-graph nodes.
     *
     * @return the stored weight for the edge n1->n2, or 0 when n1 is null, unknown,
     *         or has no edge to n2
     */
    public static double distanceT(Integer n1, Integer n2,Map<Integer,Map<Integer,Double>> neighborVecOfTarget){//T节点距离(相关度)
        if (n1 == null) {
            return 0;
        }
        Map<Integer, Double> edges = neighborVecOfTarget.get(n1);
        if (edges == null || !edges.containsKey(n2)) {
            return 0;
        }
        return edges.get(n2);
    }

    /**
     * Edge weight (relatedness) between two query-graph nodes.
     *
     * @return the stored weight for the edge n1->n2, or 0 when n1 is null, unknown,
     *         or has no edge to n2
     */
    public static double distanceQ(Integer n1, Integer n2,Map<Integer,Map<Integer,Double>> neighborVecOfQuery){//Q节点距离(相关度)
        if (n1 == null) {
            return 0;
        }
        Map<Integer, Double> edges = neighborVecOfQuery.get(n1);
        if (edges == null || !edges.containsKey(n2)) {
            return 0;
        }
        return edges.get(n2);
    }

    /** Positive part of (x - y): the difference when x exceeds y, otherwise 0. */
    public static double delta_plus(double x, double y) {
        if (x > y) {
            return x - y;
        }
        return 0;
    }

    /**
     * Partial inference cost for matching query node v to target node u.
     *
     * For every neighbour v1 of v, picks the cheapest candidate u1 of v1 (structural
     * mismatch weighted by beta_v, plus v1's previous-iteration cost U[v1][u1]) and sums
     * the minima; large sums are damped through log.
     *
     * NOTE(review): if M.get(v1) is empty the running minimum stays MAXNUM and dominates
     * the sum — preserved from the original; confirm candidate sets are never empty here.
     */
    public double calc_W(Map<Integer, Map<Integer, Double>> U, Integer v, Integer u, Map<Integer,Map<Integer,Double>> neighborVecOfTarget, Map<Integer,Map<Integer,Double>> neighborVecOfQuery, Map<Integer, Set<Integer>> M){//partial inference cost for (v,u)
        Map<Integer, Double> vNeighbors = neighborVecOfQuery.get(v);
        if (vNeighbors == null) {
            return 0;
        }
        double beta = beta_v(v, neighborVecOfQuery);
        double sum = 0;
        for (Integer v1 : vNeighbors.keySet()) { // neighbours of v
            double best = MAXNUM;
            for (Integer u1 : M.get(v1)) { // candidate set of v1
                double candidate = delta_plus(distanceQ(v, v1, neighborVecOfQuery), distanceT(u, u1, neighborVecOfTarget)) * beta
                        + U.get(v1).get(u1);
                if (candidate < best) {
                    best = candidate;
                }
            }
            sum += best;
        }
        return sum > 1 ? Math.log(sum) : sum;
    }

    /**
     * Normalisation factor for query node v: the reciprocal of the total weight of v's
     * edges. (Returns Infinity for a node with no edges; NPE for an unknown v — both
     * preserved from the original.)
     *
     * Rewritten with a typed for-each: the original used raw Iterator/Map.Entry types
     * and an unchecked cast of each value.
     */
    public double beta_v(Integer v,Map<Integer,Map<Integer,Double>> neighborVecOfQuery){
        double sum = 0;
        for (double weight : neighborVecOfQuery.get(v).values()) {
            sum += weight;
        }
        return 1 / sum;
    }

    /**
     * Resolves each graph node to its relational gig record and collects the records
     * into a JSON array; gigs that cannot be fetched are skipped.
     *
     * @param nodeList matched gig nodes (graph ids)
     * @return JSON array of gig detail objects
     */
    public JSONArray transferToJSON(List<Node> nodeList){
        JSONArray result = new JSONArray();
        try (NeoDriver neodriver = new NeoDriver()) {
            for (Node node : nodeList) {
                // The graph node's "id" property holds the relational gig_id.
                String gigIdStr = neodriver.query("start n=node("+node.id+") RETURN n.id As id").single().get("id").asString();
                JSONObject gig = fetchGig(Integer.parseInt(gigIdStr));
                if (gig != null) {
                    result.add(gig);
                }
            }
        }
        return result;
    }

    /**
     * Loads the relational details of a gig (joined with its seller) into a JSON object.
     *
     * Statement and result set are closed via try-with-resources (the original relied on
     * connection close to release them).
     *
     * @param gid relational gig id
     * @return JSON with gig + seller fields, or {@code null} when not found or on SQL error
     */
    public JSONObject fetchGig(int gid){
        String sql = "select title,score,timespan,duration,price_low,"
                + "price_high,name,sale_effiency,country,language,"
                + "education,certification,level,avg_resp_time "
                + "from gig,user where id=seller_id and gig_id=?";
        try (Connection conn = dataSource.getConnection();
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setInt(1, gid);
            try (ResultSet ret = ps.executeQuery()) {
                if (ret.next()) {
                    JSONObject json = new JSONObject();
                    json.put("gid", gid);
                    json.put("title", ret.getString(1));
                    json.put("score", ret.getString(2));
                    json.put("timespan", ret.getString(3));
                    json.put("duration", ret.getString(4));
                    // Column 5 is price_low; price_high (column 6) is selected but unused — kept as-is.
                    json.put("price", ret.getString(5));

                    json.put("seller_name", ret.getString(7));
                    json.put("sale_effiency", ret.getString(8));
                    json.put("country", ret.getString(9));
                    json.put("language", ret.getString(10));
                    json.put("education", ret.getString(11));
                    json.put("certification", ret.getString(12));
                    json.put("level", ret.getString(13));
                    json.put("avg_resp_time", ret.getString(14));

                    return json;
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Persists a search record and its associated record-gig links.
     *
     * Each RecordGig is pointed back at its parent record before being saved. The original
     * abused Stream.peek for this side effect; a plain loop makes the mutation explicit.
     *
     * @param record search record to store (its gigList may be null)
     */
    public void addRecord(software.lib.entity.Record record){
        List<RecordGig> gigList = record.gigList;
        recordDao.add(record);
        if (gigList != null) {
            for (RecordGig recordGig : gigList) {
                recordGig.setRecord(record);
                recordDao.add(recordGig);
            }
        }
    }

    // Upper bound for attribute similarities computed in delta().
    // (The name keeps the original "SIMILIRITY" spelling — it is public API.)
    public static final double TOP_SIMILIRITY = 0.7;

    @Autowired
    private DataSource dataSource;

    @Autowired
    private RecordDao recordDao;

    // Setter alternative to field injection, e.g. for explicit wiring.
    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public void setRecordDao(RecordDao recordDao) {
        this.recordDao = recordDao;
    }
}
