序
本文主要簡單梳理java應用中生產/消費kafka消息的一些使用選擇。
可用類庫
- kafka client
- spring for apache kafka
- spring integration kafka
- spring cloud stream binder kafka
基於java版的kafka client與spring進行集成
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>1.2.2.RELEASE</version>
</dependency>
與springboot的集成
對於springboot 1.5版本之前的話,需要自己去配置java configuration,而1.5版本以後則提供了auto config,具體詳見org.springframework.boot.autoconfigure.kafka這個包,主要有
- KafkaAutoConfiguration spring-boot-autoconfigure-1.5.7.RELEASE-sources.jar!/org/springframework/boot/autoconfigure/kafka/KafkaAutoConfiguration.java
// Auto-configuration for Apache Kafka. Active only when spring-kafka's
// KafkaTemplate is on the classpath; binds the spring.kafka.* property tree
// (KafkaProperties) and pulls in the annotation-driven configuration for
// @KafkaListener support.
@Configuration
@ConditionalOnClass(KafkaTemplate.class)
@EnableConfigurationProperties(KafkaProperties.class)
@Import(KafkaAnnotationDrivenConfiguration.class)
public class KafkaAutoConfiguration {

    private final KafkaProperties properties;

    public KafkaAutoConfiguration(KafkaProperties properties) {
        this.properties = properties;
    }

    // Default KafkaTemplate, registered only if the application has not
    // defined one of its own. Wires in the producer factory/listener and the
    // configured default topic.
    @Bean
    @ConditionalOnMissingBean(KafkaTemplate.class)
    public KafkaTemplate<?, ?> kafkaTemplate(
            ProducerFactory<Object, Object> kafkaProducerFactory,
            ProducerListener<Object, Object> kafkaProducerListener) {
        KafkaTemplate<Object, Object> template =
                new KafkaTemplate<Object, Object>(kafkaProducerFactory);
        template.setProducerListener(kafkaProducerListener);
        template.setDefaultTopic(this.properties.getTemplate().getDefaultTopic());
        return template;
    }

    // Fallback ProducerListener that simply logs send results.
    @Bean
    @ConditionalOnMissingBean(ProducerListener.class)
    public ProducerListener<Object, Object> kafkaProducerListener() {
        return new LoggingProducerListener<Object, Object>();
    }

    // Consumer factory built from the spring.kafka.consumer.* properties.
    @Bean
    @ConditionalOnMissingBean(ConsumerFactory.class)
    public ConsumerFactory<?, ?> kafkaConsumerFactory() {
        return new DefaultKafkaConsumerFactory<Object, Object>(
                this.properties.buildConsumerProperties());
    }

    // Producer factory built from the spring.kafka.producer.* properties.
    @Bean
    @ConditionalOnMissingBean(ProducerFactory.class)
    public ProducerFactory<?, ?> kafkaProducerFactory() {
        return new DefaultKafkaProducerFactory<Object, Object>(
                this.properties.buildProducerProperties());
    }
}
- KafkaAnnotationDrivenConfiguration spring-boot-autoconfigure-1.5.7.RELEASE-sources.jar!/org/springframework/boot/autoconfigure/kafka/KafkaAnnotationDrivenConfiguration.java
// Annotation-driven Kafka configuration: supplies the listener container
// factory used by @KafkaListener methods. Only active when the @EnableKafka
// annotation (spring-kafka) is on the classpath.
@Configuration
@ConditionalOnClass(EnableKafka.class)
class KafkaAnnotationDrivenConfiguration {

    private final KafkaProperties properties;

    KafkaAnnotationDrivenConfiguration(KafkaProperties properties) {
        this.properties = properties;
    }

    // Configurer that copies spring.kafka.listener.* settings onto a
    // ConcurrentKafkaListenerContainerFactory.
    @Bean
    @ConditionalOnMissingBean
    public ConcurrentKafkaListenerContainerFactoryConfigurer kafkaListenerContainerFactoryConfigurer() {
        ConcurrentKafkaListenerContainerFactoryConfigurer configurer =
                new ConcurrentKafkaListenerContainerFactoryConfigurer();
        configurer.setKafkaProperties(this.properties);
        return configurer;
    }

    // Default "kafkaListenerContainerFactory" bean, created only if the user
    // has not registered a bean with that exact name.
    @Bean
    @ConditionalOnMissingBean(name = "kafkaListenerContainerFactory")
    public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> kafkaConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory<Object, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<Object, Object>();
        configurer.configure(factory, kafkaConsumerFactory);
        return factory;
    }

    // Applies @EnableKafka, but only when the listener annotation processor
    // bean is not already present (i.e. the user has not enabled it manually).
    @EnableKafka
    @ConditionalOnMissingBean(name = KafkaListenerConfigUtils.KAFKA_LISTENER_ANNOTATION_PROCESSOR_BEAN_NAME)
    protected static class EnableKafkaConfiguration {
    }
}
- ConcurrentKafkaListenerContainerFactoryConfigurer spring-boot-autoconfigure-1.5.7.RELEASE-sources.jar!/org/springframework/boot/autoconfigure/kafka/ConcurrentKafkaListenerContainerFactoryConfigurer.java
// Copies listener-related settings from KafkaProperties onto a
// ConcurrentKafkaListenerContainerFactory. Each setting is applied only when
// explicitly configured, so factory defaults survive otherwise.
public class ConcurrentKafkaListenerContainerFactoryConfigurer {

    private KafkaProperties properties;

    /**
     * Set the {@link KafkaProperties} to use.
     * @param properties the properties
     */
    void setKafkaProperties(KafkaProperties properties) {
        this.properties = properties;
    }

    /**
     * Configure the specified Kafka listener container factory. The factory
     * can be further tuned and default settings can be overridden.
     * @param listenerContainerFactory the factory instance to configure
     * @param consumerFactory the {@link ConsumerFactory} to use
     */
    public void configure(
            ConcurrentKafkaListenerContainerFactory<Object, Object> listenerContainerFactory,
            ConsumerFactory<Object, Object> consumerFactory) {
        listenerContainerFactory.setConsumerFactory(consumerFactory);
        Listener listenerProperties = this.properties.getListener();
        ContainerProperties containerProperties =
                listenerContainerFactory.getContainerProperties();
        // Only override what the user actually set in spring.kafka.listener.*.
        if (listenerProperties.getAckMode() != null) {
            containerProperties.setAckMode(listenerProperties.getAckMode());
        }
        if (listenerProperties.getAckCount() != null) {
            containerProperties.setAckCount(listenerProperties.getAckCount());
        }
        if (listenerProperties.getAckTime() != null) {
            containerProperties.setAckTime(listenerProperties.getAckTime());
        }
        if (listenerProperties.getPollTimeout() != null) {
            containerProperties.setPollTimeout(listenerProperties.getPollTimeout());
        }
        // Concurrency = number of KafkaMessageListenerContainers (consumers)
        // created per application instance.
        if (listenerProperties.getConcurrency() != null) {
            listenerContainerFactory.setConcurrency(listenerProperties.getConcurrency());
        }
    }
}
創建併發的多個KafkaMessageListenerContainer,相當於一個應用實例創建多個consumer。如果是1.5版本及以上的springboot,使用起來就比較簡單了:注入kafkaTemplate直接發消息,然後簡單配置一下就可以消費消息。
spring integration kafka
spring integration是spring關於Enterprise Integration Patterns的實現,而spring integration kafka則基於spring for apache kafka提供了inbound以及outbound channel的適配器。其官方說明如下:Starting from version 2.0 this project is a complete rewrite based on the new spring-kafka project which uses the pure java Producer and Consumer clients provided by Kafka 0.9.x.x and 0.10.x.x
這個的話,沒有自動配置,又引入了integration相關的概念,整體來講,相對複雜一些。
consumer配置
// Listener container pinned to partition 0 of the target topic; the
// TopicPartitionInitialOffset tells the container where to start consuming.
@Bean
public KafkaMessageListenerContainer<String, String> container(
        ConsumerFactory<String, String> kafkaConsumerFactory) {
    return new KafkaMessageListenerContainer<>(kafkaConsumerFactory,
            new ContainerProperties(new TopicPartitionInitialOffset(topic, 0)));
}

// Consumer factory: auto-commit every 100ms, 15s session timeout, read from
// the earliest offset when no committed offset exists, String key/value
// deserialization.
@Bean
public ConsumerFactory<?, ?> kafkaConsumerFactory() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddress);
    config.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup);
    config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    config.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
    config.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
    config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    return new DefaultKafkaConsumerFactory<>(config);
}

// Inbound adapter: bridges records received by the listener container onto
// the "fromKafka" Spring Integration channel.
@Bean
public KafkaMessageDrivenChannelAdapter<String, String> adapter(
        KafkaMessageListenerContainer<String, String> container) {
    KafkaMessageDrivenChannelAdapter<String, String> inboundAdapter =
            new KafkaMessageDrivenChannelAdapter<>(container);
    inboundAdapter.setOutputChannel(fromKafka());
    return inboundAdapter;
}

// Pollable queue channel the application reads consumed messages from.
@Bean
public PollableChannel fromKafka() {
    return new QueueChannel();
}
producer配置
// Outbound handler: messages sent to the "toKafka" channel are published to
// the configured topic with the configured message key.
@Bean
@ServiceActivator(inputChannel = "toKafka")
public MessageHandler handler() throws Exception {
    KafkaProducerMessageHandler<String, String> outboundHandler =
            new KafkaProducerMessageHandler<>(kafkaTemplate());
    outboundHandler.setTopicExpression(new LiteralExpression(topic));
    outboundHandler.setMessageKeyExpression(new LiteralExpression(messageKey));
    return outboundHandler;
}

// Producer factory: no retries, 16KB batches, 1ms linger, 32MB buffer,
// String key/value serialization.
@Bean
public ProducerFactory<String, String> kafkaProducerFactory() {
    Map<String, Object> config = new HashMap<>();
    config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerAddress);
    config.put(ProducerConfig.RETRIES_CONFIG, 0);
    config.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
    config.put(ProducerConfig.LINGER_MS_CONFIG, 1);
    config.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    return new DefaultKafkaProducerFactory<>(config);
}

// KafkaTemplate backed by the producer factory above.
@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
    return new KafkaTemplate<>(kafkaProducerFactory());
}
收發信息
// Channels wired by the adapter/handler bean definitions: consume from the
// pollable "fromKafka" queue channel, publish via the "toKafka" channel.
@Autowired
@Qualifier("fromKafka")
private PollableChannel fromKafka;

@Autowired
@Qualifier("toKafka")
MessageChannel toKafka;

// Blocking receive with a 10-second timeout (returns null on timeout).
Message msg = fromKafka.receive(10000l);
// Publish a random UUID string as the message payload.
toKafka.send(new GenericMessage<Object>(UUID.randomUUID().toString()));
spring cloud stream
基於Spring Integration構建,在spring cloud環境中又稍作加工,也稍微有點封裝。具體詳見spring cloud stream kafka實例以及spring-cloud-stream-binder-kafka屬性配置。
doc
- spring-kafka
- spring-integration
- spring-integration-kafka
- spring-integration-samples-kafka
- spring-cloud-stream
- spring boot與kafka集成
- 總結kafka的consumer消費能力很低的情況下的處理方案