多易教育 Kafka in Practice (2): Java Producer Client API Examples

Example 1: Getting Started

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * A Java client acting as a producer that writes to a topic.
 * A topic is a named category of data.
 */
public class Producer1 {

    public static void main(String[] args) throws InterruptedException {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");
        // Create a producer instance
        KafkaProducer<String, String> kp = new KafkaProducer<>(p);
        for (int i = 0; i < 1000; i++) {
            Thread.sleep(1000);

            // Send a message to the hang-kafka topic: topic name plus value
            ProducerRecord<String, String> pr = new ProducerRecord<>("hang-kafka", "hang" + i);
            kp.send(pr);
            System.out.println("---------------hang" + i + "----------------");
        }

        kp.close();
    }
}
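A note on send(): it is asynchronous and returns a Future<RecordMetadata> immediately. To block until the broker acknowledges a record, you can wait on that future. Below is a minimal sketch, assuming the same Properties p as above and an enclosing method that declares throws Exception:

// Synchronous send: get() blocks until the broker responds,
// and throws if the send ultimately failed.
try (KafkaProducer<String, String> kp = new KafkaProducer<>(p)) {
    RecordMetadata meta = kp.send(new ProducerRecord<>("hang-kafka", "hello")).get();
    System.out.println("partition " + meta.partition() + ", offset " + meta.offset());
}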

Example 1: Getting Started (with a Callback)

The callback is invoked asynchronously when the producer receives the ack. It has two parameters, RecordMetadata and Exception: if the Exception is null, the message was sent successfully; if it is not null, the send failed.

Note: a failed send is retried automatically, so there is no need to retry manually inside the callback.
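For reference, this retry behavior is controlled by producer configuration. A sketch of the relevant settings (the values here are illustrative, not from the original):

p.setProperty("retries", "3");             // how many times a failed send is retried
p.setProperty("retry.backoff.ms", "100");  // wait between retry attempts, in ms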

import java.util.Properties;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * A Java client acting as a producer that writes to a topic,
 * this time registering a callback on each send.
 */
public class Producer2 {

    public static void main(String[] args) throws InterruptedException {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");
        // Create a producer instance
        KafkaProducer<String, String> kp = new KafkaProducer<>(p);
        for (int i = 0; i < 1000; i++) {
            Thread.sleep(1000);

            // Send a message to the hang-kafka topic: topic name plus value
            ProducerRecord<String, String> pr = new ProducerRecord<>("hang-kafka", "hang" + i);
            kp.send(pr, new Callback() {
                @Override
                public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                    if (e == null) {
                        // Success: the broker acknowledged the record
                        String topic = recordMetadata.topic();
                        long offset = recordMetadata.offset();
                        int partition = recordMetadata.partition();
                        System.out.println(topic + "---offset: " + offset + "---partition: " + partition);
                    } else {
                        // Failure: the send could not complete even after retries
                        e.printStackTrace();
                    }
                }
            });
        }

        kp.close();
    }
}
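One detail worth noting: the examples so far send values without keys. With the DefaultPartitioner configured above, keyless records are spread across partitions (round-robin or sticky, depending on the client version), while keyed records are assigned by a hash of the key, so the same key always lands in the same partition. A minimal sketch of a keyed send (the key "user-42" is made up for illustration):

// The three-argument ProducerRecord constructor is (topic, key, value).
ProducerRecord<String, String> keyed = new ProducerRecord<>("hang-kafka", "user-42", "hang-payload");
kp.send(keyed);  // every record with key "user-42" goes to the same partition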

Example 2: Sending MySQL Data to Kafka

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * Reads rows from a MySQL table and sends each row to Kafka.
 */
public class SqlData2KAFKA {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");

        KafkaProducer<String, String> producer = new KafkaProducer<>(p);

        // Open a JDBC connection and query the source table
        Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/db_doit15", "root", "root");
        PreparedStatement ps = conn.prepareStatement("select * from tb_product");
        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
            int id = rs.getInt("id");
            String name = rs.getString("name");
            int price = rs.getInt("price");
            String category = rs.getString("category");
            // Serialize the row as a CSV line and send it as one message
            String value = id + "," + name + "," + price + "," + category;
            ProducerRecord<String, String> msg = new ProducerRecord<>("tb_product", value);
            producer.send(msg);
        }
        // Release resources (closing the producer flushes any buffered records)
        rs.close();
        ps.close();
        conn.close();
        producer.close();
    }
}
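To confirm the rows actually reached the topic, you can read tb_product back with a consumer. A minimal sketch, with a made-up group id:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class TbProductVerifier {
    public static void main(String[] args) {
        Properties p = new Properties();
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        p.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        p.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        p.setProperty("group.id", "tb-product-verify");  // hypothetical group id
        p.setProperty("auto.offset.reset", "earliest");  // read from the beginning
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(p)) {
            consumer.subscribe(Collections.singletonList("tb_product"));
            // One poll is enough for a spot check; loop if you want to keep reading
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> r : records) {
                System.out.println(r.partition() + ":" + r.offset() + " -> " + r.value());
            }
        }
    }
}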

 

Example 3: Sending Log File Data to Kafka

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * Reads a log file line by line and sends each line to Kafka.
 */
public class Log2Kafka {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");
        // Create a producer instance
        KafkaProducer<String, String> producer = new KafkaProducer<>(p);

        BufferedReader br = new BufferedReader(new FileReader("D:\\data\\flow\\input\\flow.log"));
        String line = null;
        // Read the file; each line becomes one message
        while ((line = br.readLine()) != null) {
            ProducerRecord<String, String> msg = new ProducerRecord<>("flow.log", line);
            // Send the message
            producer.send(msg);
        }
        // Release resources (closing the producer flushes any buffered records)
        br.close();
        producer.close();
    }
}
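Unlike the one-message-per-second loops earlier, this example sends as fast as the file can be read, which is where producer batching helps. A few optional settings that could be added to the Properties above (the values are illustrative, not from the original):

p.setProperty("linger.ms", "20");          // wait up to 20 ms so records can batch together
p.setProperty("batch.size", "32768");      // per-partition batch buffer, in bytes
p.setProperty("compression.type", "lz4");  // compress whole batches on the wire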

 
