Migrating Hive Data to MongoDB with Spark (updateSave | upsertSave)

Data in the Hive emp table:

hive (soul)> select * from emp;
OK
emp.empno	emp.ename	emp.job	emp.age	emp.deptno
7369	SMITH	CLERK	24	10
7499	ALLEN	SALESMAN	30	20
7521	WARD	SALESMAN	25	30
7654	MARTIN	SALESMAN	23	10
7698	BLAKE	MANAGER	29	40

Maven dependencies (pom.xml):

  <properties>
    <scala.version>2.11.8</scala.version>
    <spark.version>2.2.0</spark.version>
    <hive.version>1.1.0</hive.version>
  </properties>

    <dependency>
      <groupId>org.mongodb.spark</groupId>
      <artifactId>mongo-spark-connector_2.11</artifactId>
      <version>${spark.version}</version>
    </dependency>

    <dependency>
      <groupId>org.mongodb</groupId>
      <artifactId>mongo-java-driver</artifactId>
      <version>3.6.3</version>
    </dependency>


    <dependency>
      <groupId>org.mongodb</groupId>
      <artifactId>bson</artifactId>
      <version>3.4.0</version>
    </dependency>


    <!--SparkHive-->
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_2.11</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <!--MySQL Driver-->
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.39</version>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.11</artifactId>
      <version>${spark.version}</version>
    </dependency>

Utility class for MongoDB updateSave (update existing documents) and upsertSave (update when a match exists, insert when it does not):

package com.soul.utils

import com.mongodb.client.MongoCollection
import com.mongodb.client.model.{ReplaceOneModel, UpdateOneModel}
import com.mongodb.spark.MongoConnector
import com.mongodb.spark.config.WriteConfig
import org.apache.spark.rdd.RDD
import org.bson.Document
import scala.collection.JavaConverters._
import scala.reflect.ClassTag

/**
  * @author soulChun
  * @create 2018-12-18-20:37
  */
object MongoUtils {

  // Maximum number of write models sent to MongoDB in a single bulkWrite call
  val DefaultMaxBatchSize = 100000

  // updateSave: apply a batch of UpdateOneModel writes against the configured collection
  def updateSave[D: ClassTag](rdd: RDD[UpdateOneModel[Document]]): Unit = updateSave(rdd, WriteConfig(rdd.sparkContext))

  def updateSave[D: ClassTag](rdd: RDD[UpdateOneModel[D]], writeConfig: WriteConfig): Unit = {
    val mongoConnector = MongoConnector(writeConfig.asOptions)
    rdd.foreachPartition(iter => if (iter.nonEmpty) {
      mongoConnector.withCollectionDo(writeConfig, { collection: MongoCollection[D] =>
        // Write each partition in batches of at most DefaultMaxBatchSize models
        iter.grouped(DefaultMaxBatchSize).foreach(batch => collection.bulkWrite(batch.toList.asJava))
      })
    })
  }

  // upsertSave: apply a batch of ReplaceOneModel writes (replace on match, insert otherwise when upsert is enabled)
  def upsertSave[D: ClassTag](rdd: RDD[ReplaceOneModel[Document]]): Unit = upsertSave(rdd, WriteConfig(rdd.sparkContext))

  def upsertSave[D: ClassTag](rdd: RDD[ReplaceOneModel[D]], writeConfig: WriteConfig): Unit = {
    val mongoConnector = MongoConnector(writeConfig.asOptions)
    rdd.foreachPartition(iter => if (iter.nonEmpty) {
      mongoConnector.withCollectionDo(writeConfig, { collection: MongoCollection[D] =>
        iter.grouped(DefaultMaxBatchSize).foreach(batch => collection.bulkWrite(batch.toList.asJava))
      })
    })
  }

}

1. Save the DataFrame to MongoDB

package com.soul.sparkmg;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.spark.MongoSpark;
import com.soul.utils.MongoUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
import org.bson.Document;

/**
 * @author soulChun
 * @create 2018-12-15-16:17
 */
public class SparkHiveToMg {
    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("SparkHiveToMg").setMaster("local[2]");
        //If your password contains an @ character, encode it as %40
        conf.set("spark.mongodb.output.uri", "mongodb://root:[email protected]/soul_db.emp");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        HiveContext hc = new HiveContext(jsc);
        Dataset<Row> df = hc.table("soul.emp");

        //Save the DataFrame directly to MongoDB
        MongoSpark.save(df);
        jsc.stop();
    }
}

Running the program automatically creates the emp collection in MongoDB (emp comes from the URI and can be changed) and then inserts the data; all five rows end up in MongoDB.
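If you want to verify the write from Spark itself, the minimal sketch below reads the collection back and prints it. It is not part of the original job: it assumes spark.mongodb.input.uri points at the same soul_db.emp collection, and the class name MgReadCheck is only a placeholder.

package com.soul.sparkmg;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.bson.Document;

public class MgReadCheck {
    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("MgReadCheck").setMaster("local[2]");
        //Read from the same collection the job above wrote to (use your own URI)
        conf.set("spark.mongodb.input.uri", "mongodb://root:[email protected]/soul_db.emp");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        //Load the collection as an RDD of BSON Documents and print what came back
        JavaMongoRDD<Document> mongoRdd = MongoSpark.load(jsc);
        System.out.println("documents in emp: " + mongoRdd.count());
        for (Document d : mongoRdd.take(5)) {
            System.out.println(d.toJson());
        }
        jsc.stop();
    }
}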

2. Save an RDD to MongoDB

package com.soul.sparkmg;

import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.spark.MongoSpark;
import com.soul.utils.MongoUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
import org.bson.Document;

/**
 * @author soulChun
 * @create 2018-12-15-16:17
 */
public class SparkHiveToMg {
    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("SparkHiveToMg").setMaster("local[2]");
        //If your password contains an @ character, encode it as %40
        conf.set("spark.mongodb.output.uri", "mongodb://root:[email protected]/soul_db.emp");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        HiveContext hc = new HiveContext(jsc);
        Dataset<Row> df = hc.table("soul.emp");

        //Save the DataFrame directly to MongoDB
//        MongoSpark.save(df);

        JavaRDD<Row> rdd = df.toJavaRDD();
        //insert: map each Hive Row to a MongoDB Document
        JavaRDD<Document> rddDoc = rdd.map(new Function<Row, Document>() {
            public Document call(Row row) throws Exception {
                Document doc = new Document();
                doc.put("empno",row.get(0));
                doc.put("ename",row.get(1));
                doc.put("job",row.get(2));
                doc.put("age",row.get(3));
                doc.put("deptno",row.get(4));
                return doc;
            }
        });
        MongoSpark.save(rddDoc);

        jsc.stop();
    }
}
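One note on the mapping above: it addresses columns by position (row.get(0) through row.get(4)). As a small alternative sketch, not what the original program uses, the same fields can be mapped by name with Row.getAs, which keeps working if the Hive column order ever changes. It is a drop-in replacement for the rddDoc block:

        //insert (by column name): drop-in replacement for the rddDoc mapping above
        JavaRDD<Document> rddDoc = rdd.map(new Function<Row, Document>() {
            public Document call(Row row) throws Exception {
                Document doc = new Document();
                doc.put("empno", row.getAs("empno"));
                doc.put("ename", row.getAs("ename"));
                doc.put("job", row.getAs("job"));
                doc.put("age", row.getAs("age"));
                doc.put("deptno", row.getAs("deptno"));
                return doc;
            }
        });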

3. Update existing documents
Change the age of the first document in MongoDB to 100.

Then run the following program:

package com.soul.sparkmg;

import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.spark.MongoSpark;
import com.soul.utils.MongoUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
import org.bson.Document;

/**
 * @author soulChun
 * @create 2018-12-15-16:17
 */
public class SparkHiveToMg {
    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("SparkHiveToMg").setMaster("local[2]");
        //If your password contains an @ character, encode it as %40
        conf.set("spark.mongodb.output.uri", "mongodb://root:[email protected]/soul_db.emp");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        HiveContext hc = new HiveContext(jsc);
        Dataset<Row> df = hc.table("soul.emp");

        //Save the DataFrame directly to MongoDB
//        MongoSpark.save(df);
        JavaRDD<Row> rdd = df.toJavaRDD();

        //update: build an UpdateOneModel for each row, matching on empno
        JavaRDD<UpdateOneModel<Document>> rddUpdate = rdd.map(new Function<Row, UpdateOneModel<Document>>() {
            public UpdateOneModel<Document> call(Row row) throws Exception {
                Document doc = new Document();
                doc.put("empno",row.get(0));
                doc.put("ename",row.get(1));
                doc.put("job",row.get(2));
                doc.put("age",row.get(3));
                doc.put("deptno",row.get(4));
                Document modifiers = new Document();
                modifiers.put("$set", doc);
                //note: upsert(true) would also insert a document if no matching empno existed
                return new UpdateOneModel<Document>(Filters.eq("empno", doc.get("empno")), modifiers, new UpdateOptions().upsert(true));
            }
        });
        MongoUtils.updateSave(rddUpdate.rdd(),rddUpdate.classTag());
        jsc.stop();
    }
}

After the run finishes, MongoDB still contains five documents, and age has been updated back to its original value of 30.

4. Update existing documents and insert missing ones
Delete documents 4 and 5 from the emp collection in MongoDB, and change the age of the first document to 200.

Then run the following program:

package com.soul.sparkmg;

import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.spark.MongoSpark;
import com.soul.utils.MongoUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;
import org.bson.Document;

/**
 * @author soulChun
 * @create 2018-12-15-16:17
 */
public class SparkHiveToMg {
    public static void main(String[] args) {

        SparkConf conf = new SparkConf().setAppName("SparkHiveToMg").setMaster("local[2]");
        //If your password contains an @ character, encode it as %40
        conf.set("spark.mongodb.output.uri", "mongodb://root:[email protected]/soul_db.emp");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        HiveContext hc = new HiveContext(jsc);
        Dataset<Row> df = hc.table("soul.emp");
        JavaRDD<Row> rdd = df.toJavaRDD();
        //upsert: build a ReplaceOneModel for each row with upsert enabled
        JavaRDD<ReplaceOneModel<Document>> rddUpsert = rdd.map(new Function<Row, ReplaceOneModel<Document>>() {
            public ReplaceOneModel<Document> call(Row row) throws Exception {
                Document doc = new Document();
                doc.put("empno",row.get(0));
                doc.put("ename",row.get(1));
                doc.put("job",row.get(2));
                doc.put("age",row.get(3));
                doc.put("deptno",row.get(4));
//                Document modifiers = new Document();
//                modifiers.put("$set",doc);
                return new ReplaceOneModel<Document>(Filters.eq("empno",doc.get("empno")),doc,new UpdateOptions().upsert(true));
            }
        });

        MongoUtils.upsertSave(rddUpsert.rdd(),rddUpsert.classTag());
        jsc.stop();
    }
}

Check MongoDB again and you will find the data has been restored.

When performing an Update or Upsert, remember that the match field used in
Filters.eq("empno", doc.get("empno"))
i.e. empno, should be set up as an indexed field in MongoDB; this improves performance. If your company has a scheduling platform that supports dynamic parameters, the code above can be rewritten as a plugin that supports migrating any Hive table.
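For reference, here is a minimal sketch of creating that index with the MongoDB Java driver already on the classpath (mongo-java-driver); the class name CreateEmpnoIndex and the connection string are placeholders to adapt to your environment. The same index can of course also be created once in the mongo shell.

package com.soul.utils;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Indexes;
import org.bson.Document;

public class CreateEmpnoIndex {
    public static void main(String[] args) {
        //Placeholder URI: point it at the same soul_db the Spark job writes to
        MongoClient client = new MongoClient(new MongoClientURI("mongodb://root:[email protected]/soul_db"));
        MongoCollection<Document> emp = client.getDatabase("soul_db").getCollection("emp");
        //Index the field used in Filters.eq("empno", ...) so each update/upsert
        //can locate its target document without scanning the whole collection
        emp.createIndex(Indexes.ascending("empno"));
        client.close();
    }
}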
