Maven 依賴 (pom.xml):
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.10</artifactId>
    <version>1.6.0</version>
</dependency>
public class CoGroup { public static void main(String[] args) { /** * 創建spark配置對象SparkConf,設置spark運行時配置信息, * 例如通過setMaster來設置程序要連接的集羣的Master的URL,如果設置爲local, * spark爲本地運行 */ SparkConf conf = new SparkConf().setAppName("My first spark").setMaster("local"); /** * 創建JavaSparkContext對象 * SparkContext是spark所有功能的唯一入口, * SparkContext核心作用,初始化spark運行所需要的核心組件,同時還會負責spark程序在master的註冊。 * */ JavaSparkContext sc = new JavaSparkContext(conf); /** * 初始化學生集合 */ List<Tuple2<Integer,String>> nameList = Arrays.asList(new Tuple2<Integer,String>(1,"xiaoming"), new Tuple2<Integer,String>(2,"feifei"), new Tuple2<Integer,String>(3,"katong")); /** * 初始化分數集合 */ List<Tuple2<Integer,Integer>> scoreList = Arrays.asList( new Tuple2<Integer,Integer>(1,90), new Tuple2<Integer,Integer>(2,80), new Tuple2<Integer,Integer>(1,70), new Tuple2<Integer,Integer>(3,60), new Tuple2<Integer,Integer>(2,80), new Tuple2<Integer,Integer>(1,70)); //轉成rdd JavaPairRDD<Integer, String> names = sc.parallelizePairs(nameList); JavaPairRDD<Integer, Integer> scores = sc.parallelizePairs(scoreList); //聚合分組 JavaPairRDD<Integer, Tuple2<Iterable<String>, Iterable<Integer>>> cogroup = names.cogroup(scores); //打印 cogroup.foreach(new VoidFunction<Tuple2<Integer, Tuple2<Iterable<String>, Iterable<Integer>>>>() { public void call(Tuple2<Integer, Tuple2<Iterable<String>, Iterable<Integer>>> integerTuple2Tuple2) throws Exception { System.out.println(integerTuple2Tuple2._1+" "+integerTuple2Tuple2._2._1+" "+integerTuple2Tuple2._2._2); } }); //關閉 sc.close(); } }