Integrating Kafka with SpringBoot for Message Reporting

I. Versions Used in This Post

SpringBoot: 1.5.9.RELEASE

ZooKeeper: zookeeper-3.4.5

Kafka: kafka_2.10-0.10.2.1


II. Integrating Kafka with SpringBoot

1. First, modify Kafka's server.properties

root@VM-0-3-ubuntu:/usr/local/kafka_2.10-0.10.2.1/config# vi server.properties

Locate advertised.listeners in the configuration file and set it to your server's IP:

advertised.listeners=PLAINTEXT://<your-server-ip>:9092

Restart the ZooKeeper and Kafka services, then open a Kafka console producer and consumer (example commands below).
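
For reference, on an install at the paths used above these commands look roughly as follows; the ZooKeeper path is an assumption (only the Kafka path appears earlier), and the IP placeholders must be adjusted to your machine. The topic wingcloud matches the one configured later in this post.

/usr/local/zookeeper-3.4.5/bin/zkServer.sh restart

cd /usr/local/kafka_2.10-0.10.2.1
bin/kafka-server-start.sh -daemon config/server.properties

# console producer and consumer on the topic used later in this post
bin/kafka-console-producer.sh --broker-list <kafka-server-ip>:9092 --topic wingcloud
bin/kafka-console-consumer.sh --bootstrap-server <kafka-server-ip>:9092 --topic wingcloud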

2. Integration code

Create a SpringBoot project and add the spring-kafka dependency to its pom.xml.

In the project, the kafka package under the test directory is used to test Kafka production and consumption; this post does not cover it.

application.properties

server.port=6097
spring.application.name=linjieadmin

#kafka
kafka.consumer.zookeeper.connect=<zookeeper-server-ip>:2181
kafka.consumer.servers=<kafka-server-ip>:9092
# offsets are committed automatically after consumption
kafka.consumer.enable.auto.commit=true
kafka.consumer.session.timeout=6000
kafka.consumer.auto.commit.interval=100
# offset reset policy
kafka.consumer.auto.offset.reset=latest
kafka.consumer.topic=wingcloud
kafka.consumer.group.id=wingcloud
kafka.consumer.concurrency=10

kafka.producer.servers=<kafka-server-ip>:9092
kafka.producer.retries=0
kafka.producer.batch.size=4096
kafka.producer.linger=1
kafka.producer.buffer.memory=40960
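
The kafka.consumer.* entries above are used by the consumer code in the test package, which this post does not cover. For orientation, here is a minimal sketch of how those properties might be wired with spring-kafka 1.x; the class name KafkaConsumerConfig and the listener class are illustrative, not part of the project shown here, and properties such as the session timeout are omitted for brevity.

package com.example.flinkdemo.controller;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;

@Configuration
@EnableKafka
public class KafkaConsumerConfig {
    @Value("${kafka.consumer.servers}")
    private String servers;
    @Value("${kafka.consumer.group.id}")
    private String groupId;
    @Value("${kafka.consumer.enable.auto.commit}")
    private boolean enableAutoCommit;
    @Value("${kafka.consumer.concurrency}")
    private int concurrency;

    // assemble consumer properties from application.properties
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    // container factory picked up by @KafkaListener methods (default bean name)
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
        factory.setConcurrency(concurrency);
        return factory;
    }
}

// illustrative listener: prints every message reported to the wingcloud topic
@Component
class WingcloudListener {
    @KafkaListener(topics = "wingcloud")
    public void onMessage(String message) {
        System.out.println("consumed: " + message);
    }
}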
DsInfoSJservice.java
package com.example.flinkdemo.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;

@Controller
@RequestMapping("DsInfoSJservice")
public class DsInfoSJservice {
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @RequestMapping(value = "webInfoSJService",method = RequestMethod.POST)
    public void webInfoSJService(@RequestBody String jsonstr, HttpServletRequest request, HttpServletResponse response){
        System.out.println("hello" + jsonstr);
        // business logic start: forward the reported JSON to the wingcloud topic
        kafkaTemplate.send("wingcloud","key",jsonstr);

        // business logic end

        PrintWriter printWriter = getWriter(response);
        response.setStatus(HttpStatus.OK.value());
        printWriter.write("success");
        closeprintwriter(printWriter);
    }

    private PrintWriter getWriter(HttpServletResponse response){
        response.setCharacterEncoding("utf-8");
        response.setContentType("application/json");
        OutputStream out = null;
        PrintWriter printWriter = null;
        try {
            out = response.getOutputStream();
            printWriter = new PrintWriter(out);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return printWriter;
    }

    private void closeprintwriter(PrintWriter printWriter){
        printWriter.flush();
        printWriter.close();
    }
}
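
Note that kafkaTemplate.send() is asynchronous: it returns a ListenableFuture and does not wait for the broker to acknowledge the record. If delivery should be observed before answering the HTTP request, a callback can be attached. A minimal sketch, assuming the spring-kafka 1.x API used in this post (the log messages are illustrative):

// additional imports needed in DsInfoSJservice.java:
//   org.springframework.kafka.support.SendResult
//   org.springframework.util.concurrent.ListenableFutureCallback

kafkaTemplate.send("wingcloud", "key", jsonstr)
        .addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(SendResult<String, String> result) {
                System.out.println("sent to " + result.getRecordMetadata().topic());
            }

            @Override
            public void onFailure(Throwable ex) {
                System.err.println("send failed: " + ex.getMessage());
            }
        });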
KafkaProducerConfig.java
package com.example.flinkdemo.controller;


import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
@EnableKafka
public class KafkaProducerConfig {
    @Value("${kafka.producer.servers}")
    private String servers;
    @Value("${kafka.producer.retries}")
    private int retries;
    @Value("${kafka.producer.batch.size}")
    private int batchSize;
    @Value("${kafka.producer.linger}")
    private int linger;
    @Value("${kafka.producer.buffer.memory}")
    private int bufferMemory;


    // assemble producer properties from application.properties
    public Map<String,Object> producerConfigs(){
        Map<String,Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,servers);
        props.put(ProducerConfig.RETRIES_CONFIG,retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG,batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG,linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG,bufferMemory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.class);
        return props;
    }

    // creates String-keyed, String-valued Kafka producers
    public ProducerFactory<String,String> producerFactory(){
        return new DefaultKafkaProducerFactory<String, String>(producerConfigs());
    }

    // the KafkaTemplate bean injected into DsInfoSJservice
    @Bean
    public KafkaTemplate<String,String> kafkaTemplate(){
        return new KafkaTemplate<String, String>(producerFactory());
    }
}
DsClienttest.java
package com.example.flinkdemo;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;


public class DsClienttest {
    public static void main(String[] args){
        String message = "kafkatest";
        String address = "http://localhost:6097/DsInfoSJservice/webInfoSJService";
        try {
            URL url = new URL(address);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");          // send as POST
            conn.setDoInput(true);
            conn.setDoOutput(true);
            conn.setAllowUserInteraction(true);
            conn.setUseCaches(false);               // disable caching
            conn.setReadTimeout(6 * 1000);          // 6-second read timeout
            // put your own browser's User-Agent here
            conn.setRequestProperty("User-Agent", "put your browser's User-Agent here");
            conn.setRequestProperty("Content-Type", "application/json");
            conn.connect();
            OutputStream outputStream = conn.getOutputStream();
            BufferedOutputStream out = new BufferedOutputStream(outputStream);
            out.write(message.getBytes());
            out.flush();

            // read the response, appending only the bytes actually read
            StringBuilder temp = new StringBuilder();
            InputStream in = conn.getInputStream();
            byte[] tempbytes = new byte[1024];
            int len;
            while ((len = in.read(tempbytes, 0, 1024)) != -1) {
                temp.append(new String(tempbytes, 0, len));
            }
            in.close();
            out.close();
            System.out.println(conn.getResponseCode());
            System.out.println(temp);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }
}
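
For a quick check without the Java client, the same request can be sent with curl (assuming the application is running locally on port 6097, as configured above):

curl -X POST -H "Content-Type: application/json" -d 'kafkatest' http://localhost:6097/DsInfoSJservice/webInfoSJService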

That's it; these classes are all the coding needed.

Start FlinkdemoApplication.java.

Run the main method of the DsClienttest class.

Its console should print 200 and success, and the FlinkdemoApplication.java console should show the relevant Kafka configuration, indicating the integration succeeded.

Finally, the Kafka console consumer opened earlier should print: kafkatest

 
