1、集羣啓動腳本,目前只用192.168.2.131 (主機名master),192.168.2.130(主機名slave1)
#!/bin/bash
# Installation paths for ZooKeeper and Kafka on the cluster hosts.
# NOTE(review): these are expanded locally; they only reach the remote side
# if the ssh heredocs below expand variables — confirm the heredoc quoting.
zkPath=/home/master/zookeeper-3.4.14/bin
kafkaPath=/home/master/kafka_2.11-2.2.1
# Restart ZooKeeper on a remote host.
# $1 - hostname to ssh into (as root)
# Uses an UNQUOTED heredoc so $zkPath and $1 are expanded locally before the
# commands are sent; the original quoted ('EOF') heredoc left $1 to be expanded
# by the remote shell, where it is empty, so the success message lost the host.
function sshStartZk(){
echo "登陸主機"$1
ssh root@$1 << EOF
cd $zkPath
./zkServer.sh stop
./zkServer.sh start
echo $1"啓動->zookeeper成功"
exit
EOF
}
# Restart the Kafka broker on a remote host.
# $1 - hostname to ssh into (as root)
# Fixes:
#  - kafka-server-start.sh is started with -daemon; without it the broker runs
#    in the foreground, the ssh session never exits, and this script hangs here.
#  - unquoted heredoc so $kafkaPath and $1 expand locally (the quoted 'EOF'
#    variant left $1 to the remote shell, where it is empty).
function sshStartKafka(){
echo "登陸主機"$1
ssh root@$1 << EOF
cd $kafkaPath/bin
./kafka-server-stop.sh
./kafka-server-start.sh -daemon ../config/server.properties
echo "啓動->kafka成功"$1
exit
EOF
}
# Drive the restart: ZooKeeper first on every host, then Kafka on every host.
echo "開始啓動zk"
for host in master slave1; do
    sshStartZk "$host"
done
echo "啓動zk完成"
echo "開始啓動kafka"
for host in master slave1; do
    sshStartKafka "$host"
done
echo "啓動kafka完成"
2、java api操作kafka
(1)加入maven依賴
<!-- NOTE: this server-side kafka_2.12 artifact is not required for the
     producer/consumer client API used below — kafka-clients alone suffices.
     Its Scala build (2.12) also differs from the installed broker
     (kafka_2.11-2.2.1), which is harmless for clients but worth noting. -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.12</artifactId>
<version>2.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.2.1</version>
</dependency>
(2)調用工具類
/**
 * Thin utility wrapper around the Kafka client API: create producers/consumers
 * against the "master:9092" broker, send one record, or poll once for records.
 */
public class KafkaConnectUtils {
    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaConnectUtils.class);

    /** Broker bootstrap address shared by producer and consumer. */
    private static final String BOOTSTRAP_SERVERS = "master:9092";

    /** Utility class — not instantiable. */
    private KafkaConnectUtils() {
    }

    /**
     * Creates a new producer with String keys/values.
     * Caller is responsible for closing it (it is AutoCloseable).
     *
     * @return a freshly configured {@link KafkaProducer}
     */
    public static KafkaProducer<String, Object> createProducer() {
        Properties props = new Properties();
        // Kafka broker host:port
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        // wait for acknowledgement from all in-sync replicas
        props.put("acks", "all");
        // do not retry failed sends
        props.put("retries", 0);
        // batch size in bytes
        props.put("batch.size", 16384);
        // small linger to allow batching
        props.put("linger.ms", 1);
        // send buffer: 32 MB
        props.put("buffer.memory", 33554432);
        // key/value serializers
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(props);
    }

    /**
     * Sends one record and logs the outcome asynchronously.
     * The producer is created per call and closed via try-with-resources;
     * close() flushes buffered records, so the send completes before return.
     *
     * @param topic destination topic
     * @param key   record key
     * @param value record value
     */
    public static void kafkaSendMsg(String topic, String key, Object value) {
        try (KafkaProducer<String, Object> producer = createProducer()) {
            ProducerRecord<String, Object> record = new ProducerRecord<>(topic, key, value);
            producer.send(record, (recordMetadata, ex) -> {
                if (recordMetadata == null) {
                    // failure: log at ERROR with the exception as cause
                    // (was INFO with ex.toString(), losing the stack trace)
                    LOGGER.error("kafkaSendMsg->發送消息失敗", ex);
                } else {
                    LOGGER.info("kafkaSendMsg->發送消息成功:topic={},key={},value={}",
                            recordMetadata.topic(),
                            recordMetadata.serializedKeySize(),
                            recordMetadata.serializedValueSize());
                }
            });
        }
    }

    /**
     * Creates a new consumer with String keys/values in group "AppGroup".
     * Caller is responsible for closing it (it is AutoCloseable).
     *
     * @return a freshly configured {@link KafkaConsumer}
     */
    public static KafkaConsumer<String, Object> createKafkaConsumer() {
        Properties props = new Properties();
        // broker address; listing every broker is not required
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        // consumer group id
        props.put("group.id", "AppGroup");
        // commit offsets automatically
        props.put("enable.auto.commit", "true");
        // auto-commit interval in ms
        props.put("auto.commit.interval.ms", "1000");
        // key/value deserializers
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return new KafkaConsumer<>(props);
    }

    /**
     * Subscribes to the given topics, polls once (up to 10 s), and logs each
     * record received. The consumer is closed afterwards (the original leaked it).
     *
     * @param topics topics to subscribe to
     */
    public static void pullKafkaMsg(List<String> topics) {
        try (KafkaConsumer<String, Object> kafkaConsumer = createKafkaConsumer()) {
            kafkaConsumer.subscribe(topics);
            ConsumerRecords<String, Object> consumerRecord = kafkaConsumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<String, Object> next : consumerRecord) {
                // parameterized logging (the original concatenated a literal "{}")
                LOGGER.info("消息消費 topic={},key={},value={}", next.topic(), next.key(), next.value());
            }
        }
    }
}
(3)測試調用
@RunWith(SpringRunner.class)
@SpringBootTest
public class AppTest {

    /** Produces two sample records to the "testdemo" topic. */
    @Test
    public void kafkaSendMsg() {
        KafkaConnectUtils.kafkaSendMsg("testdemo", "liucui", "劉翠");
        KafkaConnectUtils.kafkaSendMsg("testdemo", "liuping", "劉萍");
    }

    /** Polls the "testdemo" topic once and logs whatever arrives. */
    @Test
    public void kafkaPullMsg() {
        List<String> subscribedTopics = new ArrayList<>();
        subscribedTopics.add("testdemo");
        KafkaConnectUtils.pullKafkaMsg(subscribedTopics);
    }
}