package com.bff.gaia.mix.api.xjoin.client.rdb.all;/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


import org.apache.commons.collections.CollectionUtils;
import com.bff.gaia.api.java.typeutils.RowTypeInfo;
import com.bff.gaia.mix.api.xjoin.core.enums.EJoinType;
import com.bff.gaia.mix.api.xjoin.client.rdb.utils.SwitchUtil;
import com.bff.gaia.mix.api.xjoin.core.side.AllReqRow;
import com.bff.gaia.mix.api.xjoin.core.side.FieldInfo;
import com.bff.gaia.mix.api.xjoin.core.side.JoinInfo;
import com.bff.gaia.mix.api.xjoin.core.side.SideTableInfo;
import com.bff.gaia.mix.api.xjoin.client.rdb.table.RdbSideTableInfo;
import com.bff.gaia.shaded.guava18.com.google.common.collect.Lists;
import com.bff.gaia.shaded.guava18.com.google.common.collect.Maps;
import com.bff.gaia.types.Row;
import com.bff.gaia.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.*;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Project Name: gaia-parent
 * Description:
 * Data: 2019/7/19 17:45
 *
 * @author tiger
 * @version v1.0
 */
/**
 * Base operator for "ALL cache" RDB side-table joins: the whole dimension table is
 * loaded into memory (keyed by the concatenated equal-join field values) and every
 * input row is probed against that snapshot. Subclasses only supply the JDBC
 * connection via {@link #getConn(String, String, String)}.
 */
public abstract class RdbAllReqRow extends AllReqRow {

	private static final long serialVersionUID = 2098635140857937718L;

	private static final Logger LOG = LoggerFactory.getLogger(RdbAllReqRow.class);

	/** Number of attempts to open the JDBC connection before failing hard. */
	private static final int CONN_RETRY_NUM = 3;

	/** Back-off between connection attempts, in milliseconds. */
	private static final long CONN_RETRY_WAIT_MS = 5 * 1000L;

	/**
	 * Snapshot of the side table: cache key (see {@link #buildKey}) -> matching rows.
	 * Swapped atomically on reload so readers always see a complete snapshot.
	 */
	private AtomicReference<Map<String, List<Map<String, Object>>>> cacheRef = new AtomicReference<>();

	public RdbAllReqRow(RowTypeInfo rowTypeInfo, JoinInfo joinInfo, List<FieldInfo> outFieldInfoList, SideTableInfo sideTableInfo) {
		super(new RdbAllSideInfo(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo));
	}

	/**
	 * Builds the joined output row from the stream row and one cached side-table row.
	 *
	 * @param input     the stream row
	 * @param sideInput one cached side row as a field-name->value map, or {@code null}
	 *                  for a LEFT join miss (side columns become null)
	 * @return the output row, already projected through {@code getOutFieldIndexs()}
	 */
	@Override
	public Row fillData(Row input, Object sideInput) {
		Map<String, Object> cacheInfo = (Map<String, Object>) sideInput;
		Row row = new Row(sideInfo.getOutFieldInfoList().size());
		for (Map.Entry<Integer, Integer> entry : sideInfo.getInFieldIndex().entrySet()) {
			Object obj = input.getField(entry.getValue());
			// Time-indicator handling is disabled here; event/processing-time fields
			// behave like regular SQL timestamps but are serialized as Long.
//			boolean isTimeIndicatorTypeInfo = TimeIndicatorTypeInfo.class.isAssignableFrom(xjoinInfo.getRowTypeInfo().getTypeAt(entry.getValue()).getClass());
			boolean isTimeIndicatorTypeInfo = false;
			if (obj instanceof Timestamp && isTimeIndicatorTypeInfo) {
				obj = ((Timestamp) obj).getTime();
			}

			row.setField(entry.getKey(), obj);
		}

		// Side-table columns: null them out on a miss (LEFT join), copy otherwise.
		for (Map.Entry<Integer, String> entry : sideInfo.getSideFieldNameIndex().entrySet()) {
			if (cacheInfo == null) {
				row.setField(entry.getKey(), null);
			} else {
				row.setField(entry.getKey(), cacheInfo.get(entry.getValue()));
			}
		}

		return Row.project(row, sideInfo.getOutFieldIndexs());
	}

	@Override
	protected void initCache() throws SQLException {
		Map<String, List<Map<String, Object>>> newCache = Maps.newConcurrentMap();
		// Publish the (still empty) map first so readers never observe a null cacheRef.
		cacheRef.set(newCache);
		loadData(newCache);
	}

	@Override
	protected void reloadCache() {
		// Load into a fresh map and only swap it in on success, so a failed reload
		// keeps serving the previous complete snapshot instead of a partial one.
		Map<String, List<Map<String, Object>>> newCache = Maps.newConcurrentMap();
		try {
			loadData(newCache);
		} catch (Exception e) {
			LOG.error("reload rdb all cache failed, keeping previous snapshot", e);
			return;
		}

		cacheRef.set(newCache);
		LOG.info("----- rdb all cacheRef reload end:{}", Calendar.getInstance());
	}

	/**
	 * Probes the cached snapshot with the input row's join-key values and emits one
	 * output row per cached match. On a miss (or a null join key, which by SQL
	 * semantics can never match), a LEFT join emits the row with null side columns;
	 * an inner join emits nothing.
	 */
	@Override
	public void flatMap(Row value, Collector<Row> out) throws Exception {
		List<Object> inputParams = Lists.newArrayList();
		for (Integer conValIndex : sideInfo.getEqualValIndex()) {
			Object equalObj = value.getField(conValIndex);
			if (equalObj == null) {
				// Null join key: treat as a miss rather than emitting a null row.
				emitOnMiss(value, out);
				return;
			}

			inputParams.add(equalObj);
		}

		String key = buildKey(inputParams);
		List<Map<String, Object>> cacheList = cacheRef.get().get(key);
		if (CollectionUtils.isEmpty(cacheList)) {
			emitOnMiss(value, out);
			return;
		}

		for (Map<String, Object> one : cacheList) {
			// fillData already applies the output projection; do not project twice.
			out.collect(fillData(value, one));
		}
	}

	/** Emits the null-filled row for a LEFT join miss; inner joins emit nothing. */
	private void emitOnMiss(Row value, Collector<Row> out) {
		if (sideInfo.getJoinType() == EJoinType.LEFT) {
			out.collect(fillData(value, null));
		}
	}

	/** Concatenates the join-key values with '_' into the cache lookup key. */
	private String buildKey(List<Object> equalValList) {
		StringBuilder sb = new StringBuilder();
		for (Object equalVal : equalValList) {
			sb.append(equalVal).append("_");
		}

		return sb.toString();
	}

	/** Builds the cache key for a loaded side row from its equal-join field values. */
	private String buildKey(Map<String, Object> val, List<String> equalFieldList) {
		StringBuilder sb = new StringBuilder();
		for (String equalField : equalFieldList) {
			sb.append(val.get(equalField)).append("_");
		}

		return sb.toString();
	}

	/**
	 * Opens the JDBC connection to the side table.
	 *
	 * @param dbURL    JDBC url
	 * @param userName database user
	 * @param password database password
	 * @return an open connection (ownership passes to the caller)
	 */
	public abstract Connection getConn(String dbURL, String userName, String password);

	/**
	 * Loads the entire side table into {@code tmpCache}.
	 *
	 * @param tmpCache target map, filled with cacheKey -> list of rows
	 * @throws SQLException if the query or result iteration fails; the caller decides
	 *                      whether to keep the old snapshot (reload) or fail (init)
	 */
	private void loadData(Map<String, List<Map<String, Object>>> tmpCache) throws SQLException {
		RdbSideTableInfo tableInfo = (RdbSideTableInfo) sideInfo.getSideTableInfo();
		Connection connection = connectWithRetry(tableInfo);

		try (Statement statement = connection.createStatement()) {
			statement.setFetchSize(getFetchSize());
			try (ResultSet resultSet = statement.executeQuery(sideInfo.getSqlCondition())) {
				String[] sideFieldNames = sideInfo.getSideSelectFields().split(",");
				String[] fieldTypes = sideInfo.getSideTableInfo().getFieldTypes();
				while (resultSet.next()) {
					Map<String, Object> oneRow = Maps.newHashMap();
					for (String fieldName : sideFieldNames) {
						String trimmedName = fieldName.trim();
						Object object = resultSet.getObject(trimmedName);
						// Convert the JDBC value to the declared field type.
						int fieldIndex = sideInfo.getSideTableInfo().getFieldList().indexOf(trimmedName);
						oneRow.put(trimmedName, SwitchUtil.getTarget(object, fieldTypes[fieldIndex]));
					}

					String cacheKey = buildKey(oneRow, sideInfo.getEqualFieldList());
					tmpCache.computeIfAbsent(cacheKey, key -> Lists.newArrayList()).add(oneRow);
				}
			}
		} finally {
			connection.close();
		}
	}

	/**
	 * Opens the connection, retrying up to {@link #CONN_RETRY_NUM} times with a
	 * fixed back-off. Never logs the password.
	 */
	private Connection connectWithRetry(RdbSideTableInfo tableInfo) {
		for (int i = 0; ; i++) {
			try {
				return getConn(tableInfo.getUrl(), tableInfo.getUserName(), tableInfo.getPassword());
			} catch (Exception e) {
				if (i >= CONN_RETRY_NUM - 1) {
					throw new RuntimeException("get side table conn fail, url:" + tableInfo.getUrl(), e);
				}

				LOG.warn("get conn fail, wait for 5 sec and try again, url:{}, userName:{}",
						tableInfo.getUrl(), tableInfo.getUserName(), e);
				try {
					Thread.sleep(CONN_RETRY_WAIT_MS);
				} catch (InterruptedException ie) {
					// Restore the interrupt flag and abort the retry loop.
					Thread.currentThread().interrupt();
					throw new RuntimeException("interrupted while retrying side table conn", ie);
				}
			}
		}
	}

	/** JDBC fetch size used while streaming the side table; override to tune. */
	public int getFetchSize() {
		return 1000;
	}
}