package com.sinosoft.service8;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.xml.namespace.QName;
import javax.xml.ws.Service;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

import com.sinosoft.xmlTransformer.XmlToString;



/**
 * Kafka consumer worker: subscribes to one topic via the 0.8 high-level
 * consumer API, writes each message to a rotating pair of local log files,
 * and forwards payloads that pass the xmlkey/xmlvalue filter to a JAX-WS
 * web service. Designed to run on its own thread ({@link Runnable}).
 */
public class IMyServiceImpl1 implements Runnable {
	/** Kafka topic this consumer subscribes to. */
	private String topic;
	/** Consumer group id registered in ZooKeeper. */
	private String groupId;
	/** Element name the XML filter matches against. */
	private String xmlkey;
	/** Element value the filter expects for a message to be forwarded. */
	private String xmlvalue;
	/** WSDL URL of the receiving web service. */
	private String url;
	/** Target namespace of the service QName. */
	private String namespace;
	/** Local part of the service QName. */
	private String method;
	/** ZooKeeper connect string (host:port[,host:port...]). */
	private String zookeeperConnect;
	/** Service port proxy; (re)created in SendMessage for each message. */
	IMyService imsp;
	// Rotation counters: records fill file1 up to 100, then file2, then the
	// counters wrap (see WriteLog).
	int count1 = 0;
	int count2 = 0;
	File file1 = new File("output1.text");
	File file2 = new File("output2.text");

	/**
	 * Creates a consumer that reads {@code topic} from the given ZooKeeper
	 * ensemble and forwards matching messages to the service at {@code url}.
	 */
	public IMyServiceImpl1(String topic, String groupId, String xmlkey,
			String xmlvalue, String url, String namespace, String method,
			String zookeeperConnect) {
		this.topic = topic;
		this.groupId = groupId;
		this.xmlkey = xmlkey;
		this.xmlvalue = xmlvalue;
		this.url = url;
		this.namespace = namespace;
		this.method = method;
		this.zookeeperConnect = zookeeperConnect;
	}

	/**
	 * Stand-alone consumer driven by consumer.properties / topic.properties
	 * on the classpath; prints every message to stdout and never returns.
	 * (The misspelled name is kept: it is public and callers may rely on it.)
	 */
	public void counsumeList() {
		Properties prop = new Properties();
		Properties prop1 = new Properties();
		try {
			// Consumer connection settings (zookeeper.connect, group.id, ...).
			prop.load(IMyServiceImpl1.class.getClassLoader().getResourceAsStream(
					"consumer.properties"));
			// The topic name lives in its own properties file.
			prop1.load(IMyServiceImpl1.class.getClassLoader().getResourceAsStream(
					"topic.properties"));
		} catch (IOException e) {
			e.printStackTrace();
		}
		ConsumerConfig config = new ConsumerConfig(prop);
		ConsumerConnector consumerConnector = Consumer
				.createJavaConsumerConnector(config);

		String topic = prop1.getProperty("topic");
		// Request exactly one stream (one consumer thread) for this topic.
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		topicCountMap.put(topic, Integer.valueOf(1));
		Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumerConnector
				.createMessageStreams(topicCountMap);
		// BUGFIX: the streams map is keyed by the configured topic; the
		// original looked up the literal "test" and would NPE for any other
		// topic name.
		List<KafkaStream<byte[], byte[]>> list = messageStreams.get(topic);
		KafkaStream<byte[], byte[]> kafkaStream = list.get(0);

		ConsumerIterator<byte[], byte[]> iterator = kafkaStream.iterator();
		// hasNext() blocks until a message arrives, so this loop runs until
		// the process is stopped.
		while (iterator.hasNext()) {
			MessageAndMetadata<byte[], byte[]> next = iterator.next();
			String message = new String(next.message());
			String key = "";
			if (next.key() != null) {
				key = new String(next.key());
			}
			int partition = next.partition();
			long offset = next.offset();
			System.out.println("消息所在的分区：" + partition + ",消息的key：" + key
					+ ",消息的offset：" + offset + ",消息内容：" + message + "");
		}
	}

	/**
	 * Consumer loop: connects to the configured topic, then for every message
	 * writes a rotating local log record and forwards the payload to the
	 * web service. Never returns.
	 */
	@Override
	public void run() {
		// BUGFIX: connect once. The original created a brand-new (and never
		// shut down) ConsumerConnector on every pass of the outer loop,
		// leaking connections if the inner loop ever exited.
		KafkaStream<byte[], byte[]> kafkaStream = ConnectTopic(topic,
				zookeeperConnect, groupId);
		// Hoisted out of the loop: one formatter per thread is enough
		// (SimpleDateFormat is not thread-safe, but this is thread-local use).
		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
		while (true) {
			ConsumerIterator<byte[], byte[]> iterator = kafkaStream.iterator();
			// hasNext() blocks for the next message; this loop normally
			// never terminates.
			while (iterator.hasNext()) {
				MessageAndMetadata<byte[], byte[]> next = iterator.next();
				String message = new String(next.message());
				String key = "";
				if (next.key() != null) {
					key = new String(next.key());
				}
				int partition = next.partition();
				long offset = next.offset();
				String dt = sdf.format(new Date());
				// Record layout read back by downstream parsers - keep the
				// "-split-" separators exactly as they are.
				String input = partition + "-split-" + key
						+ "-split-" + offset + "-split-" + message + "-split-" + dt + "\n-----split-----\n";

				WriteLog(offset, dt, input);
				SendMessage(message);
			}
		}
	}

	/**
	 * Forwards {@code createXml} to the remote service when it passes the
	 * xmlkey/xmlvalue filter, and appends the service's boolean reply to
	 * oAndF.text.
	 *
	 * @param createXml raw XML payload taken from the Kafka message
	 */
	private void SendMessage(String createXml) {
		QName qname = new QName(namespace, method);
		try {
			Service service = Service.create(new URL(url), qname);
			imsp = service.getPort(IMyService.class);

			XmlFilter filter = new XmlFilterImpl();
			// Only messages whose xmlkey element carries xmlvalue are sent on.
			if (filter.xmlFilter(xmlkey, xmlvalue, createXml)) {
				boolean acceptMessage = imsp.acceptMessage(createXml);
				// WriteLog has already appended the offset/timestamp header,
				// so the file is normally non-empty here; the guard (kept from
				// the original) skips a reply with no matching header line.
				File oAndF = new File("oAndF.text");
				if (oAndF.length() != 0) {
					appendTo(oAndF, "-----split-----" + acceptMessage + "\n");
				}
			}
		} catch (MalformedURLException e) {
			// Bad service URL: log and drop this message.
			e.printStackTrace();
		}
	}

	/**
	 * Appends one log record, alternating between output1.text and
	 * output2.text in batches of 100 records, then writes the
	 * offset/timestamp header for this message to oAndF.text (SendMessage
	 * later appends the service reply to the same file).
	 *
	 * NOTE(review): the original "reset" only cleared the counters - its
	 * write("") never truncated the appending writer - so both files grow
	 * forever. That behavior is preserved here; confirm whether truncation
	 * was intended before changing it.
	 *
	 * @param offset Kafka offset of the message being logged
	 * @param dt     formatted receive timestamp
	 * @param input  pre-formatted "-split-"-separated record
	 */
	private void WriteLog(long offset, String dt, String input) {
		ensureExists(file1);
		ensureExists(file2);
		if (!(file1.exists() && file2.exists())) {
			return;
		}
		// Sequential ifs (not else-if) on purpose: at the 100-record
		// boundaries more than one branch can fire for the same record,
		// exactly as in the original.
		if (count1 < 100 && count2 < 100) {
			appendTo(file1, input);
			count1++;
		}
		if (count1 == 100) {
			if (count2 < 100) {
				appendTo(file2, input);
				count2++;
			}
			if (count2 == 100 && count1 == 100) {
				appendTo(file1, input);
				count1 = 0;
			}
		}
		if (count2 == 100) {
			if (count1 < 100) {
				appendTo(file1, input);
				count1++;
			}
			if (count1 == 100 && count2 == 100) {
				appendTo(file2, input);
				count2 = 0;
			}
		}
		// Header half of the offset/reply record; SendMessage appends
		// "-----split-----<boolean>\n" after the service call.
		File oAndF = new File("oAndF.text");
		ensureExists(oAndF);
		appendTo(oAndF, "" + offset + "-----split-----" + dt);
	}

	/** Creates {@code f} as an empty file if it does not exist yet. */
	private void ensureExists(File f) {
		if (!f.exists()) {
			try {
				f.createNewFile();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Appends {@code text} to {@code target} and closes the writer.
	 * BUGFIX: the original flushed but never closed the FileWriter /
	 * BufferedWriter pairs it opened in WriteLog, leaking a file handle
	 * per message.
	 */
	private void appendTo(File target, String text) {
		BufferedWriter out = null;
		try {
			out = new BufferedWriter(new FileWriter(target, true));
			out.append(text);
			out.flush();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (out != null) {
				try {
					out.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Builds a high-level Kafka consumer for {@code topic} and returns its
	 * single message stream. Offsets are auto-committed every second; the
	 * ZooKeeper session times out after 6s.
	 *
	 * @param topic             topic to subscribe to
	 * @param zookeeper_connect ZooKeeper connect string
	 * @param group_id          consumer group id
	 * @return the one KafkaStream created for the topic
	 */
	private KafkaStream<byte[], byte[]> ConnectTopic(String topic,
			String zookeeper_connect, String group_id) {
		Properties prop = new Properties();
		prop.put("zookeeper.connect", zookeeper_connect);
		prop.put("group.id", group_id);
		prop.put("zookeeper.session.timeout.ms", "6000");
		prop.put("auto.commit.enable", "true");
		prop.put("auto.commit.interval.ms", "1000");

		ConsumerConfig config = new ConsumerConfig(prop);
		ConsumerConnector consumerConnector = Consumer
				.createJavaConsumerConnector(config);

		// Request exactly one stream for the topic and hand it back.
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		topicCountMap.put(topic, Integer.valueOf(1));
		Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumerConnector
				.createMessageStreams(topicCountMap);
		List<KafkaStream<byte[], byte[]>> list = messageStreams.get(topic);
		return list.get(0);
	}
}
