Connecting a Flink DataSet job to a mongos cluster with mongo-hadoop

The official Flink example is fairly minimal and has not been updated in a long time.

The production mongos cluster requires authentication, so configure the credentials as the mongo-hadoop documentation describes. The output here goes to a test database, so not much is configured on that side. An alternative, shown commented out in the listing below, is to keep the read credentials in mongo.input.uri and put the admin credentials in a separate mongo.auth.uri.

As written, the read is single-threaded; I have not confirmed whether parallel reads can be enabled through configuration alone, but a possible approach is sketched right below.
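One avenue worth trying is to let mongo-hadoop create one input split per shard chunk instead of disabling splits as the full listing below does. This is a hedged sketch, not verified against this cluster; parallelJob and parallelIf are names introduced here for illustration:

// Hedged sketch, untested: with create_input_splits enabled, MongoInputFormat
// should emit one split per shard chunk, so several Flink source subtasks can
// read their own chunk ranges in parallel.
Job parallelJob = Job.getInstance();
parallelJob.getConfiguration().set("mongo.input.uri",
        "mongodb://readuser:readpw@mongos01:port,mongos02:port,mongos03:port/db.collection?authMechanism=SCRAM-SHA-1&authSource=admin&readPreference=secondary");
parallelJob.getConfiguration().set("mongo.input.split.create_input_splits", "true");
parallelJob.getConfiguration().set("mongo.input.split.read_shard_chunks", "true");

HadoopInputFormat<Object, BSONObject> parallelIf =
        new HadoopInputFormat<>(new MongoInputFormat(), Object.class, BSONObject.class, parallelJob);
// env.createInput(parallelIf) would then distribute the chunk splits across
// the environment's parallel source subtasks.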

import com.mongodb.hadoop.MongoInputFormat;
import com.mongodb.hadoop.MongoOutputFormat;
import com.mongodb.hadoop.io.BSONWritable;
import example.flink.KeySelector.RecordSeclectId;
import example.flink.mapFunction.BSONMapToRecord;
import example.flink.reduceFunction.KeyedGroupReduce;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopInputFormat;
import org.apache.flink.api.java.hadoop.mapreduce.HadoopOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.hadoop.mapreduce.Job;
import org.bson.BSONObject;

public class MongoSet {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        Job inputJob = Job.getInstance();
        // Alternative: read credentials in mongo.input.uri, admin credentials in a separate mongo.auth.uri.
        //inputJob.getConfiguration().set("mongo.input.uri", "mongodb://readuser:readpw@mongos01:port,mongos02:port,mongos03:port/db.collection");
        //inputJob.getConfiguration().set("mongo.auth.uri", "mongodb://root:rootpw@mongos01:port,mongos02:port,mongos03:port/admin");

        // Authenticated read against the cluster's secondaries.
        inputJob.getConfiguration().set("mongo.input.uri", "mongodb://readuser:readpw@mongos01:port,mongos02:port,mongos03:port/db.collection?authMechanism=SCRAM-SHA-1&authSource=admin&readPreference=secondary");
        inputJob.getConfiguration().set("mongo.input.split.read_shard_chunks", "true");
        // false -> no input splits are created, so the read runs in a single task.
        inputJob.getConfiguration().set("mongo.input.split.create_input_splits", "false");
        inputJob.getConfiguration().set("mongo.input.split_size", "16");
        // Server-side filter and projection, so only the needed documents and fields are transferred.
        inputJob.getConfiguration().set("mongo.input.query", "{'createDateTime': {\"$lte\":{\"$date\":\"2019-05-27T00:00:00.000Z\"}, \"$gte\":{\"$date\":\"2010-03-17T00:00:00.000Z\"}}}");
        inputJob.getConfiguration().set("mongo.input.fields", "{\"Id\":\"1\",\"saleType\":\"1\",\"saleNum\":\"1\",\"createDateTime\":\"1\"}");

        // Wrap the Hadoop MongoInputFormat so Flink can use it as a DataSet source.
        HadoopInputFormat<Object, BSONObject> hdIf =
                new HadoopInputFormat<>(new MongoInputFormat(), Object.class, BSONObject.class, inputJob);

        DataSet<Tuple2<Object, BSONObject>> inputNew = env.createInput(hdIf);

        DataSet<Tuple2<String, BSONWritable>> personInfoDataSet = inputNew
                .map(new BSONMapToRecord())
                .groupBy(new RecordSeclectId())
                .reduceGroup(new KeyedGroupReduce());

        Job outputJob = Job.getInstance();
        outputJob.getConfiguration().set("mongo.output.uri", "mongodb://mongo:27017/db.collection");
        outputJob.getConfiguration().set("mongo.output.batch.size", "8");
        // HadoopOutputFormat insists on an output directory even though MongoOutputFormat ignores it.
        outputJob.getConfiguration().set("mapreduce.output.fileoutputformat.outputdir", "/tmp");
        personInfoDataSet.output(new HadoopOutputFormat<>(new MongoOutputFormat<>(), outputJob));

        env.execute(MongoSet.class.getCanonicalName());
    }
}
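The three operator classes imported from example.flink.* are not shown in the post. Below is a rough, hypothetical sketch of what they might look like; the field names Id and saleNum come from the projection above, while the merge logic is invented for illustration:

import com.mongodb.hadoop.io.BSONWritable;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.bson.BSONObject;

// Hypothetical: maps each raw (key, BSONObject) pair to (Id, BSONWritable).
public class BSONMapToRecord implements MapFunction<Tuple2<Object, BSONObject>, Tuple2<String, BSONWritable>> {
    @Override
    public Tuple2<String, BSONWritable> map(Tuple2<Object, BSONObject> value) {
        BSONObject doc = value.f1;
        return new Tuple2<>(doc.get("Id").toString(), new BSONWritable(doc));
    }
}

// Hypothetical: groups records by the Id extracted in the map step.
public class RecordSeclectId implements KeySelector<Tuple2<String, BSONWritable>, String> {
    @Override
    public String getKey(Tuple2<String, BSONWritable> value) {
        return value.f0;
    }
}

// Hypothetical: collapses each group to one document, summing saleNum across it.
public class KeyedGroupReduce implements GroupReduceFunction<Tuple2<String, BSONWritable>, Tuple2<String, BSONWritable>> {
    @Override
    public void reduce(Iterable<Tuple2<String, BSONWritable>> values, Collector<Tuple2<String, BSONWritable>> out) {
        String key = null;
        BSONObject merged = null;
        int totalSaleNum = 0;
        for (Tuple2<String, BSONWritable> value : values) {
            key = value.f0;
            merged = value.f1.getDoc();
            Object n = merged.get("saleNum");
            totalSaleNum += (n instanceof Number) ? ((Number) n).intValue() : 0;
        }
        merged.put("saleNum", totalSaleNum);
        out.collect(new Tuple2<>(key, new BSONWritable(merged)));
    }
}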

 

mongo-hadoop documentation: mongo-hadoop: Authentication

 
