Connecting a Flink DataSet to a mongos cluster with mongo-hadoop

The official Flink example is fairly minimal and has not been updated in a long time.

The production mongos cluster requires authentication, so configure the access rules as the documentation describes. The output here goes to a test database, so not much is configured on that side.

The read here is single-threaded; it is not yet clear whether parallel reading can be achieved through configuration (a candidate configuration is sketched right below).
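If it can, it would presumably go through mongo-hadoop's split settings. Here is a minimal, untested sketch, applied to the same inputJob as in the full job further down; these keys appear in mongo-hadoop itself, but whether Flink actually fans the resulting splits out across its subtasks has not been verified here.

// Untested sketch: ask mongo-hadoop to create input splits so Flink
// could read them with parallelism > 1.
inputJob.getConfiguration().set("mongo.input.split.create_input_splits", "true");
// Reuse the sharded cluster's existing chunk boundaries as split boundaries.
inputJob.getConfiguration().set("mongo.input.split.read_shard_chunks", "true");
// Target split size in MB when the connector computes splits itself.
inputJob.getConfiguration().set("mongo.input.split_size", "16");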

import com.mongodb.hadoop.MongoInputFormat;
import com.mongodb.hadoop.MongoOutputFormat;
import com.mongodb.hadoop.io.BSONWritable;
import example.flink.KeySelector.RecordSeclectId;
import example.flink.mapFunction.BSONMapToRecord;
import example.flink.reduceFunction.KeyedGroupReduce;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopInputFormat;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.hadoop.mapreduce.Job;
import org.bson.BSONObject;

public class MongoSet {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        Job inputJob = Job.getInstance();
        // Alternative from the mongo-hadoop docs: separate data URI and admin auth URI.
        //inputJob.getConfiguration().set("mongo.input.uri", "mongodb://readuser:readpw@mongos01:port,mongos02:port,mongos03:port/db.collection");
        //inputJob.getConfiguration().set("mongo.auth.uri", "mongodb://root:rootpw@mongos01:port,mongos02:port,mongos03:port/admin");

        // Authenticate against the admin database and prefer secondaries for the read.
        inputJob.getConfiguration().set("mongo.input.uri",
                "mongodb://readuser:readpw@mongos01:port,mongos02:port,mongos03:port/db.collection"
                        + "?authMechanism=SCRAM-SHA-1&authSource=admin&readPreference=secondary");
        // With create_input_splits=false the collection is read as a single split,
        // which is why the read is single-threaded (see the note above).
        inputJob.getConfiguration().set("mongo.input.split.read_shard_chunks", "true");
        inputJob.getConfiguration().set("mongo.input.split.create_input_splits", "false");
        inputJob.getConfiguration().set("mongo.input.split_size", "16");
        // Server-side filter and projection so only the needed fields are transferred.
        inputJob.getConfiguration().set("mongo.input.query",
                "{'createDateTime': {\"$lte\":{\"$date\":\"2019-05-27T00:00:00.000Z\"}, \"$gte\":{\"$date\":\"2010-03-17T00:00:00.000Z\"}}}");
        inputJob.getConfiguration().set("mongo.input.fields",
                "{\"Id\":\"1\",\"saleType\":\"1\",\"saleNum\":\"1\",\"createDateTime\":\"1\"}");

        HadoopInputFormat<Object, BSONObject> hdIf =
                new HadoopInputFormat<>(new MongoInputFormat(), Object.class, BSONObject.class, inputJob);

        DataSet<Tuple2<Object, BSONObject>> inputNew = env.createInput(hdIf);

        DataSet<Tuple2<String, BSONWritable>> personInfoDataSet = inputNew
                .map(new BSONMapToRecord())
                .groupBy(new RecordSeclectId())
                .reduceGroup(new KeyedGroupReduce());

        Job outputJob = Job.getInstance();
        outputJob.getConfiguration().set("mongo.output.uri", "mongodb://mongo:27017/db.collection");
        outputJob.getConfiguration().set("mongo.output.batch.size", "8");
        // Required by Flink's Hadoop output-format wrapper even though MongoOutputFormat writes no files.
        outputJob.getConfiguration().set("mapreduce.output.fileoutputformat.outputdir", "/tmp");
        personInfoDataSet.output(new HadoopOutputFormat<>(new MongoOutputFormat<>(), outputJob));

        env.execute(MongoSet.class.getCanonicalName());
    }
}
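The mapping, key-selector, and reduce classes imported above are project-specific and not shown in this post. For orientation, here is a hypothetical sketch of what they could look like; the field name "Id" follows the projection above, and the reduce logic (keep the first record per key) is purely an assumption, not the real implementation.

import com.mongodb.hadoop.io.BSONWritable;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.bson.BSONObject;

// Hypothetical: turn the raw (id, BSONObject) pair into (documentId, writable record).
class BSONMapToRecord implements MapFunction<Tuple2<Object, BSONObject>, Tuple2<String, BSONWritable>> {
    @Override
    public Tuple2<String, BSONWritable> map(Tuple2<Object, BSONObject> value) {
        return new Tuple2<>(String.valueOf(value.f1.get("Id")), new BSONWritable(value.f1));
    }
}

// Hypothetical: key each record by the id extracted in the map step.
class RecordSeclectId implements KeySelector<Tuple2<String, BSONWritable>, String> {
    @Override
    public String getKey(Tuple2<String, BSONWritable> value) {
        return value.f0;
    }
}

// Hypothetical: emit one record per key; this sketch simply keeps the first one.
class KeyedGroupReduce implements GroupReduceFunction<Tuple2<String, BSONWritable>, Tuple2<String, BSONWritable>> {
    @Override
    public void reduce(Iterable<Tuple2<String, BSONWritable>> values, Collector<Tuple2<String, BSONWritable>> out) {
        for (Tuple2<String, BSONWritable> v : values) {
            out.collect(v);
            return;
        }
    }
}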

 

mongo-hadoop documentation: mongo-hadoop: Authentication

 
