Message Queue - Kafka

Setting up ZooKeeper and the kafka-0.8 client is not covered here; please look those up yourself...

Kafka dependencies (version 1.3.0.RELEASE):


<dependency>
    <groupId>org.springframework.integration</groupId>
    <artifactId>spring-integration-kafka</artifactId>
    <version>1.3.0.RELEASE</version>
    <exclusions>
        <exclusion>
            <groupId>org.springframework</groupId>
            <artifactId>*</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-messaging</artifactId>
    <version>${spring.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro</artifactId>
    <version>1.7.7</version>
</dependency>


Kafka configuration

Producer configuration (spring-integration-kafka 1.x namespace; bean ids such as messageChannel, producerContext and kkProducer are illustrative, and ${kafka.broker.list} is resolved from an external properties file):

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:int="http://www.springframework.org/schema/integration"
       xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
       xmlns:task="http://www.springframework.org/schema/task"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
           http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
           http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
           http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- channel that KKProducer writes to -->
    <int:channel id="messageChannel">
        <int:queue/>
    </int:channel>

    <!-- helper bean; the channel is injected through setChannel() -->
    <bean id="kkProducer" class="com.pay.kafka.KKProducer">
        <property name="channel" ref="messageChannel"/>
    </bean>

    <!-- serializer referenced by the producer configuration below -->
    <bean id="stringSerializer" class="org.apache.kafka.common.serialization.StringSerializer"/>

    <!-- native producer properties -->
    <bean id="producerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="topic.metadata.refresh.interval.ms">3600000</prop>
                <prop key="message.send.max.retries">5</prop>
                <prop key="serializer.class">kafka.serializer.StringEncoder</prop>
                <prop key="request.required.acks">1</prop>
            </props>
        </property>
    </bean>

    <!-- drains messageChannel and writes the payloads to Kafka -->
    <int-kafka:outbound-channel-adapter
            kafka-producer-context-ref="producerContext"
            auto-startup="true"
            channel="messageChannel">
        <int:poller fixed-delay="1000" time-unit="MILLISECONDS" receive-timeout="0" task-executor="taskExecutor"/>
    </int-kafka:outbound-channel-adapter>

    <task:executor id="taskExecutor" pool-size="5" keep-alive="120" queue-capacity="500"/>

    <int-kafka:producer-context id="producerContext" producer-properties="producerProperties">
        <int-kafka:producer-configurations>
            <int-kafka:producer-configuration
                    broker-list="${kafka.broker.list}"
                    key-class-type="java.lang.String"
                    key-serializer="stringSerializer"
                    value-class-type="java.lang.String"
                    value-serializer="stringSerializer"
                    topic="otherTopic"/>
        </int-kafka:producer-configurations>
    </int-kafka:producer-context>
</beans>
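To smoke-test the producer side, here is a minimal sketch that loads the XML above and pushes one message onto the channel; the file name kafka-producer.xml and the messageChannel bean id are assumptions, so adjust them to the actual configuration:

package com.pay.kafka;

import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;

public class ProducerBootstrap {

  public static void main(String[] args) {
    // load the producer configuration shown above (file name is illustrative)
    ClassPathXmlApplicationContext ctx =
        new ClassPathXmlApplicationContext("kafka-producer.xml");
    try {
      // must match the channel id declared in the XML
      MessageChannel channel = ctx.getBean("messageChannel", MessageChannel.class);
      channel.send(MessageBuilder.withPayload("hello kafka")
          .setHeader(KafkaHeaders.TOPIC, "otherTopic")
          .build());
    } finally {
      ctx.close();
    }
  }
}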

Consumer configuration (again in the spring-integration-kafka 1.x namespace; the group id, decoder bean, topic name and ${zookeeper.connect} placeholder are illustrative):

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:int="http://www.springframework.org/schema/integration"
       xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
       xmlns:task="http://www.springframework.org/schema/task"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
           http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
           http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
           http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- channel the inbound adapter publishes consumed batches to -->
    <int:channel id="inputFromKafka">
        <int:dispatcher task-executor="kafkaMessageExecutor"/>
    </int:channel>

    <int-kafka:zookeeper-connect id="zookeeperConnect"
            zk-connect="${zookeeper.connect}"
            zk-connection-timeout="6000"
            zk-session-timeout="6000"
            zk-sync-time="2000"/>

    <!-- native consumer properties -->
    <bean id="consumerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="auto.offset.reset">smallest</prop>
                <prop key="socket.receive.buffer.bytes">10485760</prop> <!-- 10 MB -->
                <prop key="fetch.message.max.bytes">5242880</prop>      <!-- 5 MB -->
                <prop key="auto.commit.interval.ms">1000</prop>
            </props>
        </property>
    </bean>

    <int-kafka:inbound-channel-adapter
            kafka-consumer-context-ref="consumerContext"
            auto-startup="true"
            channel="inputFromKafka">
        <int:poller fixed-delay="10" time-unit="MILLISECONDS"/>
    </int-kafka:inbound-channel-adapter>

    <task:executor id="kafkaMessageExecutor" pool-size="8" keep-alive="120" queue-capacity="500"/>

    <bean id="kafkaDecoder" class="org.springframework.integration.kafka.serializer.common.StringDecoder"/>

    <int-kafka:consumer-context id="consumerContext"
            consumer-timeout="1000"
            zookeeper-connect="zookeeperConnect"
            consumer-properties="consumerProperties">
        <int-kafka:consumer-configurations>
            <int-kafka:consumer-configuration group-id="pay-group"
                    key-decoder="kafkaDecoder"
                    value-decoder="kafkaDecoder"
                    max-messages="5000">
                <int-kafka:topic id="otherTopic" streams="4"/>
            </int-kafka:consumer-configuration>
        </int-kafka:consumer-configurations>
    </int-kafka:consumer-context>

    <!-- hand each consumed batch to KKConsumer.kkMsgConsumer -->
    <bean id="kkConsumer" class="com.pay.kafka.KKConsumer"/>

    <int:outbound-channel-adapter channel="inputFromKafka" ref="kkConsumer" method="kkMsgConsumer"/>
</beans>
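Bootstrapping the consumer side only requires loading the context; the inbound-channel-adapter's poller then delivers each consumed batch to the handler bean (KKConsumer, shown below). A minimal sketch, assuming the XML above is saved as kafka-consumer.xml (a hypothetical name):

package com.pay.kafka;

import org.springframework.context.support.ClassPathXmlApplicationContext;

public class ConsumerBootstrap {

  public static void main(String[] args) throws InterruptedException {
    // load the consumer configuration shown above (file name is illustrative)
    ClassPathXmlApplicationContext ctx =
        new ClassPathXmlApplicationContext("kafka-consumer.xml");
    ctx.registerShutdownHook();
    // keep the JVM alive so the inbound adapter keeps polling
    Thread.currentThread().join();
  }
}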

package com.pay.kafka;

import java.util.Date;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;

public class KKProducer {

  private static Logger log = LoggerFactory.getLogger(KKProducer.class);

  // injected by Spring via the kkProducer bean definition; kept static so the static helper below can use it
  private static MessageChannel channel;

  public void setChannel(MessageChannel channel) {
    KKProducer.channel = channel;
  }

  public static void kkMsgProducer(String topic, String strMsg) {
    log.debug(String.format("Kafka producer, topic:%s, message:%s, time:%s", topic, strMsg, new Date()));
    // route the payload to the given topic via the Kafka TOPIC header
    channel.send(MessageBuilder.withPayload(strMsg).setHeader(KafkaHeaders.TOPIC, topic).build());
  }
}
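Once Spring has injected the channel, publishing from business code is a single call; the topic matches the otherTopic configured above and the payload string is just an example:

// somewhere in business code, after the Spring context has started
KKProducer.kkMsgProducer("otherTopic", "hello kafka");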

package com.pay.kafka;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class KKConsumer {

  private static Logger log = LoggerFactory.getLogger(KKConsumer.class);

  // payload shape: topic -> (partition -> decoded message)
  public void kkMsgConsumer(Map<String, Map<Integer, String>> msgs) {
    for (Map.Entry<String, Map<Integer, String>> entry : msgs.entrySet()) {
      LinkedHashMap<Integer, String> messages = (LinkedHashMap<Integer, String>) entry.getValue();
      // log the partitions that contributed to this batch
      Set<Integer> keys = messages.keySet();
      for (Integer i : keys) {
        log.debug("Kafka consumer, partition:{}", i);
      }
      // log every message of the batch together with its topic
      Collection<String> values = messages.values();
      for (Iterator<String> iterator = values.iterator(); iterator.hasNext();) {
        String msg = iterator.next();
        log.debug("Kafka consumer, topic:{}, message:{}", entry.getKey(), msg);
      }
    }
  }
}
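The handler expects a map of topic -> (partition -> message). The sketch below builds that shape by hand and calls KKConsumer directly, which is a handy way to exercise the logging logic without a broker; all names and values here are made up for illustration:

package com.pay.kafka;

import java.util.LinkedHashMap;
import java.util.Map;

public class KKConsumerSmokeTest {

  public static void main(String[] args) {
    // partition -> message
    LinkedHashMap<Integer, String> partitions = new LinkedHashMap<Integer, String>();
    partitions.put(0, "message from partition 0");
    partitions.put(1, "message from partition 1");

    // topic -> (partition -> message), mirroring kkMsgConsumer's signature
    Map<String, Map<Integer, String>> payload = new LinkedHashMap<String, Map<Integer, String>>();
    payload.put("otherTopic", partitions);

    new KKConsumer().kkMsgConsumer(payload);
  }
}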




