Kafka environment setup and practice (1) Installing Kafka: http://zilongzilong.iteye.com/blog/2267913
Kafka environment setup and practice (2) Kafka API in practice: http://zilongzilong.iteye.com/blog/2267924
1. Add the Kafka client dependency to the Maven project
<!-- kafka -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.9.0.0</version>
</dependency>
2. Integrate Kafka with Spring
Spring integration configuration, spring-kafka.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
                           http://www.springframework.org/schema/beans/spring-beans.xsd">

    <bean id="testProducer" class="org.apache.kafka.clients.producer.KafkaProducer">
        <constructor-arg type="java.util.Properties">
            <props>
                <prop key="bootstrap.servers">kafka0:9092,kafka1:9092,kafka2:9092</prop>
                <prop key="acks">all</prop>
                <prop key="retries">0</prop>
                <prop key="batch.size">16384</prop>
                <prop key="linger.ms">1</prop>
                <prop key="buffer.memory">33554432</prop>
                <prop key="key.serializer">org.apache.kafka.common.serialization.StringSerializer</prop>
                <prop key="value.serializer">org.apache.kafka.common.serialization.StringSerializer</prop>
                <prop key="partitioner.class">com.***.kafka.Partitioner.RandomPartitioner</prop>
            </props>
        </constructor-arg>
    </bean>

    <bean id="group1Consumer" class="org.apache.kafka.clients.consumer.KafkaConsumer">
        <constructor-arg type="java.util.Properties">
            <props>
                <prop key="bootstrap.servers">kafka0:9092,kafka1:9092,kafka2:9092</prop>
                <prop key="group.id">group1</prop>
                <prop key="enable.auto.commit">true</prop>
                <prop key="auto.commit.interval.ms">1000</prop>
                <prop key="session.timeout.ms">30000</prop>
                <prop key="key.deserializer">org.apache.kafka.common.serialization.StringDeserializer</prop>
                <prop key="value.deserializer">org.apache.kafka.common.serialization.StringDeserializer</prop>
            </props>
        </constructor-arg>
    </bean>

    <bean id="group2Consumer" class="org.apache.kafka.clients.consumer.KafkaConsumer">
        <constructor-arg type="java.util.Properties">
            <props>
                <prop key="bootstrap.servers">kafka0:9092,kafka1:9092,kafka2:9092</prop>
                <prop key="group.id">group2</prop>
                <prop key="enable.auto.commit">true</prop>
                <prop key="auto.commit.interval.ms">1000</prop>
                <prop key="session.timeout.ms">30000</prop>
                <prop key="key.deserializer">org.apache.kafka.common.serialization.StringDeserializer</prop>
                <prop key="value.deserializer">org.apache.kafka.common.serialization.StringDeserializer</prop>
            </props>
        </constructor-arg>
    </bean>

</beans>
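The producer and consumer code below looks these beans up through a SpringContextHolder helper that the original post does not show. A minimal sketch of such a helper (an assumption, not part of the post) implements ApplicationContextAware and exposes the context statically; it must itself be declared as a bean in the same Spring context so that Spring injects the ApplicationContext into it:

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;

// Hypothetical helper: lets non-Spring-managed code (utility classes, servlet
// listeners) look up beans from the running ApplicationContext by name.
public class SpringContextHolder implements ApplicationContextAware {

    private static ApplicationContext applicationContext;

    @Override
    public void setApplicationContext(ApplicationContext context) throws BeansException {
        applicationContext = context;
    }

    @SuppressWarnings("unchecked")
    public static <T> T getBean(String name) {
        return (T) applicationContext.getBean(name);
    }
}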
3. Using the producer
A custom partition strategy (Partitioner) class:
import java.util.List;
import java.util.Map;
import java.util.Random;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.PartitionInfo;

public class RandomPartitioner implements Partitioner {

    // a single Random instance is enough; Random is thread-safe, so there is
    // no need to allocate a new one for every record
    private final Random random = new Random();

    @Override
    public void configure(Map<String, ?> configs) {
    }

    @Override
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        List<PartitionInfo> partitions = cluster.partitionsForTopic(topic);
        int numPartitions = partitions.size();
        if (numPartitions > 0) {
            // pick a partition uniformly at random, ignoring the record key
            return random.nextInt(numPartitions);
        }
        return 0;
    }

    @Override
    public void close() {
    }
}
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class ProducerUtil {

    private static Producer<String, String> producer = SpringContextHolder.getBean("testProducer");

    public static void produce(String message) {
        // asynchronously send the message to the "test" topic
        producer.send(new ProducerRecord<String, String>("test", message));
    }
}
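ProducerUtil.produce("...") can then be called from anywhere in the web layer. Note that send() is asynchronous and returns a Future<RecordMetadata>; if a caller needs confirmation that the broker accepted the record, a blocking variant is easy to add. A sketch, with a hypothetical produceSync method added to ProducerUtil:

// extra imports needed: java.util.concurrent.ExecutionException,
// org.apache.kafka.clients.producer.RecordMetadata
public static RecordMetadata produceSync(String message)
        throws InterruptedException, ExecutionException {
    // get() blocks until the acks=all acknowledgement configured in spring-kafka.xml arrives
    return producer.send(new ProducerRecord<String, String>("test", message)).get();
}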
4. Using the consumer
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class KafkaServletContextListener implements ServletContextListener {

    @Override
    public void contextInitialized(ServletContextEvent sce) {
        ExecutorService executor = Executors.newFixedThreadPool(2);

        // consumer in group1: receives every message published to "test"
        executor.execute(new Runnable() {
            @Override
            public void run() {
                KafkaConsumer<String, String> consumer = SpringContextHolder.getBean("group1Consumer");
                consumer.subscribe(Arrays.asList("test"));
                while (true) {
                    ConsumerRecords<String, String> records = consumer.poll(100);
                    for (ConsumerRecord<String, String> record : records) {
                        record.key();
                        record.offset();
                        record.partition();
                        record.topic();
                        record.value();
                        // TODO: process the record
                    }
                }
            }
        });

        // consumer in group2: an independent group, so it also receives every message
        executor.execute(new Runnable() {
            @Override
            public void run() {
                KafkaConsumer<String, String> consumer = SpringContextHolder.getBean("group2Consumer");
                consumer.subscribe(Arrays.asList("test"));
                while (true) {
                    ConsumerRecords<String, String> records = consumer.poll(100);
                    for (ConsumerRecord<String, String> record : records) {
                        record.key();
                        record.offset();
                        record.partition();
                        record.topic();
                        record.value();
                        // TODO: process the record
                    }
                }
            }
        });
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce) {
    }
}
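The listener above never stops its polling threads, so the consumers keep running when the web application is undeployed. A minimal shutdown sketch, assuming the executor and consumers are kept in fields of the listener (the field names here are hypothetical), is to call consumer.wakeup() from contextDestroyed; wakeup() is the one KafkaConsumer method that may safely be called from another thread, and it makes the blocked poll() throw a WakeupException that the polling loop can catch before closing the consumer:

// Hypothetical fields added to KafkaServletContextListener
private ExecutorService executor;
private final java.util.List<KafkaConsumer<String, String>> consumers =
        new java.util.concurrent.CopyOnWriteArrayList<KafkaConsumer<String, String>>();

@Override
public void contextDestroyed(ServletContextEvent sce) {
    // force each blocked poll() to throw WakeupException so the loops can exit
    for (KafkaConsumer<String, String> consumer : consumers) {
        consumer.wakeup();
    }
    if (executor != null) {
        executor.shutdown();
    }
}

// Inside each Runnable, the poll loop would then be wrapped like this:
// try {
//     while (true) { ... consumer.poll(100) ... }
// } catch (org.apache.kafka.common.errors.WakeupException e) {
//     // expected during shutdown
// } finally {
//     consumer.close();
// }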
Register the listener in web.xml:
<listener>
    <listener-class>com.***.web.listener.KafkaServletContextListener</listener-class>
</listener>