Flink MongoDBSink

MongoUtils

package com.soul.utils;

import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;

import java.util.ArrayList;
import java.util.List;

/**
 * @author soulChun
 * @create 2018-12-24-14:08
 */
public class MongoUtils {

    public static MongoClient getConnect(){
        ServerAddress serverAddress = new ServerAddress("localhost", 27017);
        List<MongoCredential> credential = new ArrayList<>();
        // The three arguments of MongoCredential.createScramSha1Credential() are: user name, database name, password
        MongoCredential mongoCredential1 = MongoCredential.createScramSha1Credential("root", "soul_db", "root".toCharArray());
        credential.add(mongoCredential1);
        // Obtain a MongoDB connection authenticated with the credential above
        MongoClient mongoClient = new MongoClient(serverAddress, credential);
        return mongoClient;
    }

}
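
A quick way to sanity-check the helper before wiring it into Flink (a minimal sketch; MongoUtilsTest is a hypothetical class, and it assumes a MongoDB instance is running on localhost:27017 with the root/root user on soul_db configured above):

package com.soul.utils;

import com.mongodb.MongoClient;

public class MongoUtilsTest {
    public static void main(String[] args) {
        // Open a client through the helper and list the collections in soul_db.
        MongoClient client = MongoUtils.getConnect();
        try {
            for (String name : client.getDatabase("soul_db").listCollectionNames()) {
                System.out.println("collection: " + name);
            }
        } finally {
            client.close();
        }
    }
}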

MongoDBSink

package com.soul.kafka;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.soul.utils.MongoUtils;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.bson.Document;

import java.util.ArrayList;
import java.util.List;

/**
 * @author soulChun
 * @create 2018-12-24-10:05
 */
public class MongoDBSink extends RichSinkFunction<Tuple5<String, String, String, String, String>> {
    private static final long serialVersionUID = 1L;
    MongoClient mongoClient = null;

    @Override
    public void invoke(Tuple5<String, String, String, String, String> value) {
        try {
            // Reuse the connection opened in open(); reconnect only if it is missing.
            if (mongoClient == null) {
                mongoClient = MongoUtils.getConnect();
            }
            MongoDatabase db = mongoClient.getDatabase("soul_db");
            MongoCollection<Document> collection = db.getCollection("kafka");
            List<Document> list = new ArrayList<>();
            Document doc = new Document();
            doc.append("IP", value.f0);
            doc.append("TIME", value.f1);
            doc.append("CourseID", value.f2);
            doc.append("Status_Code", value.f3);
            doc.append("Referer", value.f4);
            list.add(doc);
            System.out.println("Insert Starting");
            collection.insertMany(list);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void open(Configuration parms) throws Exception {
        super.open(parms);
        mongoClient = MongoUtils.getConnect();
    }

    @Override
    public void close() throws Exception {
        if (mongoClient != null) {
            mongoClient.close();
        }
    }
}

This Sink can be used in place of the MySQLSink from the "Flink cleans Kafka data and writes it to MySQL" test.
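
For reference, a minimal sketch of wiring the sink into a job; the sample elements and the job class name are assumptions for illustration, and in the original pipeline the cleaned stream would come from the Kafka source plus the log-parsing map used in the MySQLSink test:

package com.soul.kafka;

import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class MongoDBSinkJob {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Hypothetical cleaned stream standing in for the Kafka source + parsing step.
        DataStream<Tuple5<String, String, String, String, String>> cleaned = env.fromElements(
                Tuple5.of("127.0.0.1", "2018-12-24 10:05:00", "course-1", "200", "-"));

        // Attach the MongoDB sink exactly where MySQLSink was used before.
        cleaned.addSink(new MongoDBSink());

        env.execute("Kafka to MongoDB");
    }
}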
