SparkCore 流量統計

package com.xzdream.spark

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Log App: reads a tab-separated access log and prints the top-10
  * provinces by access count.
  *
  * Each log line is expected to have 4 tab-separated fields; field
  * index 3 is the province. Malformed lines are bucketed under the
  * sentinel key "-".
  *
  * Usage: an optional first program argument overrides the default
  * input path.
  */
object SparkContextApp {
  def main(args: Array[String]): Unit = {
    // Local mode with 2 worker threads — intended for development runs.
    val sparkConf = new SparkConf().setAppName("LogApp").setMaster("local[2]")

    val sc = new SparkContext(sparkConf)

    // Allow the input path to be supplied on the command line,
    // falling back to the original hard-coded development path.
    val inputPath = args.headOption.getOrElse(
      "file:///Users/hadoop/scala/spark_demo1/src/main/logs/2020-5-11.log")
    val lines = sc.textFile(inputPath)
//    lines.take(3).foreach(println)

    /*
    // Per-domain traffic totals (kept for reference, disabled).
    lines.map(x => {
      val splits = x.split("\t")
      val length = splits.length
      if(length == 4){
        val domain = splits(0)
        var traffic = 0L
        try{
          traffic = splits(1).toLong
        }catch {
          case e:Exception => 0L
        }

        (domain,traffic)
      }else{
        ("-",0L)
      }

    }).reduceByKey(_+_).collect.foreach(println)
     */

    // Top-10 provinces by access count.
    lines.map { line =>
      val splits = line.split("\t")
      if (splits.length == 4) {
        // Field 3 is the province; count one hit per line.
        (splits(3), 1)
      } else {
        // Malformed line: use the "-" sentinel key.
        // Fix: was the Char literal '-', which widened the pair key
        // type to Any and was inconsistent with the String sentinel
        // used by the per-domain variant above.
        ("-", 1)
      }
    }.reduceByKey(_ + _).sortBy(_._2, ascending = false).take(10).foreach(println)

    sc.stop()
  }
}

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章