package com.surfilter.massdata.spark.output.ipcheck;

import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;

import com.act.sparkanalyz.output.impl.HiveOutput;

/**
 * Spark output stage that is intended to persist an IP-check result
 * DataFrame into Hive.
 *
 * <p>NOTE(review): the actual Hive insert ({@code insertTable(list)}) was
 * commented out in the original source, so this class currently only
 * collects the rows to the driver and logs — nothing is persisted.
 * Confirm whether the insert should be restored (it declared
 * {@link java.sql.SQLException}).
 */
public class IpCheckConlinctOut extends HiveOutput {

	private static final long serialVersionUID = 1L;
	private static final Log log = LogFactory.getLog(IpCheckConlinctOut.class);

	// Hive insert parameters. Presumably injected by configuration /
	// the surrounding framework; not read anywhere in this class's
	// visible code — TODO confirm against HiveOutput / callers.
	private String columns;
	private String tableName;
	private String values;

	/**
	 * Collects {@code result} to the driver and logs progress.
	 *
	 * @param result     the DataFrame produced by the analysis stage; if
	 *                   {@code null}, the method logs a warning and returns
	 * @param commandMap job parameters passed by the framework (unused here)
	 */
	@Override
	public void write(DataFrame result, Map<String, String> commandMap) {
		log.info("===========IpCheckConlinctOut begin============");
		if (result == null) {
			// Guard: the original code would NPE on collectAsList().
			log.warn("IpCheckConlinctOut received null DataFrame, nothing to write");
			return;
		}
		// Beware: collectAsList() pulls the entire result onto the driver.
		List<Row> list = result.collectAsList();
		log.info("IpCheckConlinctOut collected " + list.size() + " rows");
		// TODO(review): restore the Hive insert if persistence is intended,
		// e.g. insertTable(list) with proper SQLException handling.
		log.info("===========IpCheckConlinctOut end============");
	}

}
