package com.evenmo.service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.IntConsumer;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.evenmo.mapper.BigDataMapper;
import com.evenmo.mapper.BigDataMapper2;
import com.evenmo.pojo.BigData;
import com.evenmo.pojo.LayUIResultMap;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;

@Service(value = "bigDataService")
public class BigDataServiceImpl implements BigDataService {

	/**
	 * Rows written per batched SQL statement. 2600 was measured as the safe
	 * chunk size for the current JVM / DB packet limits (see original notes).
	 */
	private static final int BATCH_SIZE = 2600;

	@Autowired
	private BigDataMapper bigDataDao;
	@Autowired
	private BigDataMapper2 bigDataDao2;
	// Page size used when fetching the seed rows that the bulk-write demos re-insert.
	private int insertNums = 100;

	/**
	 * Baseline approach: inserts every fetched row with one INSERT per row.
	 * Slow and connection-hungry — each row costs a full round trip.
	 *
	 * @return elapsed wall-clock time in milliseconds
	 */
	@Override
	public Long insertDatasAlots() {
		long start = System.currentTimeMillis();

		List<BigData> queryBigDatas = bigDataDao.queryBigDatas();
		// One statement per row: previously measured at 282972 ms (~283 s).
		queryBigDatas.forEach(data -> bigDataDao.insertDatas(data));

		long end = System.currentTimeMillis();
		System.out.println("插入了" + queryBigDatas.size() + "行数据");
		return end - start;
	}

	/**
	 * Batched insert via the annotation-based mapper: rows are written in
	 * chunks of {@link #BATCH_SIZE} so each round trip carries many rows.
	 *
	 * @return elapsed wall-clock time in milliseconds
	 */
	@Override
	public Long insertDatasAlotsQuickly() {
		PageHelper.startPage(1, insertNums);
		List<BigData> queryBigDatas = bigDataDao.queryBigDatas();

		int size = queryBigDatas.size();
		long start = System.currentTimeMillis();
		int piciCount = writeInBatches(queryBigDatas,
				bigDataDao::insertDatasQuickly,
				batchNo -> System.out.println("此批次已插入！"));
		System.out.println("全部插入完毕！");

		long end = System.currentTimeMillis();
		long res = end - start;
		System.out.println("插入了" + size + "条数据，分" + piciCount + "批次完成");
		System.out.println("共花费了" + res + "毫秒");
		return res;
	}

	/**
	 * Batched insert via the XML-mapped statement ({@code mapper.xml} variant);
	 * otherwise identical to {@link #insertDatasAlotsQuickly()}.
	 *
	 * @return elapsed wall-clock time in milliseconds
	 */
	@Override
	public Long insertDatasAlotsQuickly2() {
		PageHelper.startPage(1, insertNums);
		List<BigData> queryBigDatas = bigDataDao.queryBigDatas();

		int size = queryBigDatas.size();
		long start = System.currentTimeMillis();
		int piciCount = writeInBatches(queryBigDatas,
				bigDataDao2::insertdataquick,
				batchNo -> System.out.println("此批次已插入！"));
		System.out.println("全部插入完毕！");

		long end = System.currentTimeMillis();
		long res = end - start;
		System.out.println("插入了" + size + "条数据，分" + piciCount + "批次完成");
		System.out.println("共花费了" + res + "毫秒");
		return res;
	}

	/**
	 * Demonstrates passing query parameters through a Map: the mapper receives
	 * a comma-separated id list under key "ids" plus an extra "hh" parameter.
	 * (Removed: an unused {@code int[]} local and a preliminary full-table
	 * query whose result was discarded.)
	 *
	 * @return rows matching the hard-coded id list
	 */
	@Override
	public List<BigData> queryBigDatasByIds() {
		Map<String, Object> map = new HashMap<String, Object>();
		map.put("ids", "1,2,3,4,5,6");
		map.put("hh", "5");
		return bigDataDao.queryBigDatasByIds(map);
	}

	/**
	 * Batch-updates every row, {@link #BATCH_SIZE} rows per statement, through
	 * the annotation-based mapper.
	 *
	 * @return number of rows processed
	 */
	@Override
	public int updateBatch() {
		List<BigData> queryBigDatas = bigDataDao.queryBigDatas();
		int size = queryBigDatas.size();
		long start = System.currentTimeMillis();
		int piciCount = writeInBatches(queryBigDatas,
				bigDataDao::updateBatch,
				batchNo -> System.out.println("第" + batchNo + "批次更新完毕！"));
		long end = System.currentTimeMillis();
		System.out.println("更新" + size + "条数据，共分" + piciCount + "批次更新完毕,用时" + (end - start) + "毫秒");
		return size;
	}

	/**
	 * Batch-updates a small page of rows through the XML-mapped statement.
	 * NOTE(review): {@code startPage(0, 3)} uses page number 0 — PageHelper
	 * normally coerces this to page 1, but confirm it is intentional.
	 *
	 * @return number of rows processed
	 */
	@Override
	public int updateBatch2() {
		PageHelper.startPage(0, 3);
		List<BigData> queryBigDatas = bigDataDao.queryBigDatas();
		int size = queryBigDatas.size();
		long start = System.currentTimeMillis();
		int piciCount = writeInBatches(queryBigDatas,
				bigDataDao2::updatebatch2,
				batchNo -> System.out.println("第" + batchNo + "批次更新完毕！"));
		long end = System.currentTimeMillis();
		System.out.println("更新" + size + "条数据，共分" + piciCount + "批次更新完毕,用时" + (end - start) + "毫秒");
		return size;
	}

	/**
	 * Paged query for the LayUI table front end.
	 *
	 * Expected keys in {@code map}: "pageNum" (Integer) and "pageSize"
	 * (Integer), both required; "search" (String) is an optional name filter.
	 *
	 * @param map request parameters; pageNum/pageSize must be non-null
	 * @return the requested page of rows plus the total count, wrapped for LayUI
	 */
	public LayUIResultMap<BigData> queryDatasBy(Map<String, Object> map) {
		Integer pageNum = (Integer) map.get("pageNum");
		Integer pageSize = (Integer) map.get("pageSize");
		String condition = (String) map.get("search");
		// BUG FIX: the original compared the String with == ('condition==""'),
		// which tests reference identity and never matches a runtime-built
		// empty string. Null is now checked first to avoid an NPE.
		if (condition == null || condition.isEmpty()) {
			condition = "%"; // no search term: match everything (LIKE wildcard)
		}
		System.out.println("conditon=" + condition);
		System.out.println(pageNum + ":" + pageSize);
		PageHelper.startPage(pageNum, pageSize);
		BigData data = new BigData();
		// BUG FIX: query by the caller's search condition; the original left a
		// hard-coded debug value ("test") here, silently ignoring 'condition'.
		data.setName(condition);
		List<BigData> bigDatas = bigDataDao.queryBigDatas2(data);
		LayUIResultMap<BigData> result = new LayUIResultMap<BigData>();
		// Parameterized (was a raw PageInfo); getTotal() reports the unpaged count.
		PageInfo<BigData> info = new PageInfo<BigData>(bigDatas);
		result.setData(bigDatas);
		result.setCount(info.getTotal());
		return result;
	}

	/**
	 * Splits {@code rows} into chunks of at most {@link #BATCH_SIZE}, hands
	 * each chunk to {@code batchWriter}, and reports progress through
	 * {@code afterBatch} (called with the 1-based batch number after each
	 * chunk is written). Replaces four copy-pasted while(true) loops.
	 *
	 * @param rows        rows to write; an empty list results in zero calls
	 * @param batchWriter mapper call that persists one chunk
	 * @param afterBatch  progress callback, receives the completed batch number
	 * @return the number of batches executed (0 for an empty list)
	 */
	private int writeInBatches(List<BigData> rows, Consumer<List<BigData>> batchWriter, IntConsumer afterBatch) {
		int batches = 0;
		for (int from = 0; from < rows.size(); from += BATCH_SIZE) {
			int to = Math.min(from + BATCH_SIZE, rows.size());
			batchWriter.accept(rows.subList(from, to)); // subList is a view — no copy
			afterBatch.accept(++batches);
		}
		return batches;
	}

}
