Notes on integrating Spring Boot with Kafka

pom.xml dependencies

<dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
</dependencies>

application.yml configuration

spring:
  kafka:
#    bootstrap-servers: 192.168.10.45:9092
    bootstrap-servers: 192.168.198.128:9092
    producer:
      # Number of retries; default is Integer.MAX_VALUE
      retries: 1
      # Batch size in bytes (default 16K)
      batch-size: 16384
      # Producer buffer memory (32M)
      buffer-memory: 33554432
      # Key and value serializers (these are the defaults and can be omitted)
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      # Ack mode; default 1, i.e. only the leader has to confirm receipt
      acks: 1
      # Spring Boot 1.5.16 auto-configuration does not pick up extra settings under properties (not sure why); 2.x does
      #properties:
      #  Use a custom partitioner
      #  {partitioner.class: com.msy.kafka.MyPartition, acks: all}
    consumer:
      group-id: test
      enable-auto-commit: false
      # earliest: consume from the beginning; latest: consume only new messages (default is latest)
      auto-offset-reset: latest
      # Key and value deserializers (these are the defaults and can be omitted)
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    listener:
      # Number of concurrent consumer threads
      concurrency: 6
      # When committing offsets manually, ackMode must be set
      ack-mode: MANUAL
    # Custom property (not a standard Spring Kafka setting); read via @Value in the producer below
    topic: test5
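The commented-out partitioner.class entry points at a custom partitioner, com.msy.kafka.MyPartition, which is not shown in this post. A minimal sketch of what such a class could look like follows; the routing logic here is an assumption, not the real MyPartition:

package com.msy.kafka;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

// Hypothetical custom partitioner; the original MyPartition is not included in the post
public class MyPartition implements Partitioner {

    @Override
    public int partition(String topic, Object key, byte[] keyBytes,
                         Object value, byte[] valueBytes, Cluster cluster) {
        int partitionCount = cluster.partitionCountForTopic(topic);
        // Example routing: keyless messages go to partition 0, keyed messages are hashed
        if (keyBytes == null) {
            return 0;
        }
        return Math.abs(key.hashCode()) % partitionCount;
    }

    @Override
    public void close() {
        // nothing to clean up
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // no extra configuration needed
    }
}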

Producer (it has to be called somewhere to actually send data)

package com.example.sms.middleware.sms_middleware.kafka;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

/**
 * @Author kang
 * @Date 2020/6/19 10:03
 **/
@Component
public class Producer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Topic name comes from the custom spring.kafka.topic property in application.yml
    @Value("${spring.kafka.topic}")
    private String topic;

    public void sendMessage() {
        kafkaTemplate.send(topic, "message");
    }
}
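Nothing in the class above triggers sendMessage() by itself. A minimal sketch of a REST endpoint that calls it for testing; the controller class and the /send path are assumptions, not part of the original project:

package com.example.sms.middleware.sms_middleware.kafka;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical controller used only to trigger the producer manually
@RestController
public class ProducerController {

    @Autowired
    private Producer producer;

    @GetMapping("/send")
    public String send() {
        producer.sendMessage();
        return "sent";
    }
}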

Consumer

package com.example.sms.middleware.sms_middleware.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

/**
 * @Author kang
 * @Date 2020/6/19 10:04
 **/
@Component
@Slf4j
public class Consumer {

    @KafkaListener(topics = "test5")  // topics can list more than one topic to listen to
    public void consumerMessage(ConsumerRecord<String, String> consumerRecord, Acknowledgment ack) {
        try {
            String value = consumerRecord.value();
            log.info("Received message: {}", value);
            // business logic......
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Commit the offset manually (matches ack-mode: MANUAL in application.yml)
            ack.acknowledge();
        }
    }
}
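If the listener should follow the topic configured in application.yml instead of the hardcoded "test5", @KafkaListener also accepts a property placeholder; only the annotation changes:

    // Resolve the topic name from the custom spring.kafka.topic property at startup
    @KafkaListener(topics = "${spring.kafka.topic}")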

 

Error: requests to the endpoint return 404

Fix:

By default @SpringBootApplication only scans the package containing the Application class and its sub-packages. Make sure the controllers and components live under that package, or widen the scan as shown below.
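A common way to widen the scan is scanBasePackages; a sketch, assuming the application class sits in the project's root package (the class name here is a guess, the original post does not show it):

package com.example.sms.middleware.sms_middleware;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// scanBasePackages widens component scanning beyond the application class's own package
@SpringBootApplication(scanBasePackages = "com.example.sms.middleware.sms_middleware")
public class SmsMiddlewareApplication {

    public static void main(String[] args) {
        SpringApplication.run(SmsMiddlewareApplication.class, args);
    }
}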

 
