Spark Learning 07: Ways to Create a DataFrame

I. Introduction

A Dataset whose elements are Row objects is a DataFrame. In other words, DataFrame is a special case of Dataset, and the name DataFrame is simply shorthand for Dataset[Row].
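To make that relationship concrete, here is a minimal sketch (the object and Person case class are illustrative names, not part of the original examples) that converts a typed Dataset to a DataFrame with toDF and back with as[T]:

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object DataFrameVsDataset {
  case class Person(name: String, age: Long)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("df-vs-ds").getOrCreate()
    import spark.implicits._

    // A typed Dataset[Person]
    val ds: Dataset[Person] = Seq(Person("Ann", 30), Person("Bob", 25)).toDS()

    // Dropping the type gives a DataFrame, which is just Dataset[Row]
    val df: DataFrame = ds.toDF()

    // Re-attaching the type with as[T] goes back to Dataset[Person]
    val ds2: Dataset[Person] = df.as[Person]

    df.printSchema()
    spark.stop()
  }
}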

II. Creation Methods

1. Creating a DataFrame with the toDF function

import org.apache.spark.sql.SparkSession

object CreateDataFrameFun {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("Spark SQL basic example")
      .getOrCreate()
    // toDF is provided by the SparkSession implicits, so this import is required
    import spark.implicits._

    // Convert a local Seq of tuples into a DataFrame with named columns
    val df = Seq(
      (1, "First Value", java.sql.Date.valueOf("2010-01-01")),
      (2, "Second Value", java.sql.Date.valueOf("2010-02-01"))
    ).toDF("int_column", "string_column", "date_column")

    df.foreach(x => println(x))
    spark.stop()
  }
}
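As a side note, if toDF is called without column names, the columns fall back to the tuple field names _1, _2, _3. A minimal sketch, assuming the same SparkSession and import spark.implicits._ as in the example above (these lines can be dropped into the main method):

// Without explicit names, the columns are called _1, _2 and _3
val defaultDf = Seq(
  (1, "First Value", java.sql.Date.valueOf("2010-01-01"))
).toDF()
defaultDf.printSchema()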

2. Creating a DataFrame with a case class (bean) + toDF

import org.apache.spark.sql.SparkSession

object CreateDataFrameFun {
  // The case class doubles as the schema: its field names become the column names
  case class Person(name: String, age: Long)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("Spark SQL basic example")
      .getOrCreate()
    import spark.implicits._

    // Create an RDD of Person objects from a text file, then convert it to a DataFrame
    val peopleDF = spark.sparkContext
      .textFile("examples/src/main/resources/people.txt")
      .map(_.split(","))
      .map(attributes => Person(attributes(0), attributes(1).trim.toInt))
      .toDF()

    peopleDF.foreach(x => println(x))
    spark.stop()
  }
}
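The example expects people.txt to contain comma-separated "name, age" lines, as in the file shipped with the Spark distribution (e.g. "Michael, 29"). If that file is not at hand, the same case class + toDF pattern works on an in-memory collection; a minimal sketch, assuming the same SparkSession, Person case class and implicits import as above:

// Same pattern without an external file: a local Seq of Person converted with toDF
val inlinePeopleDF = Seq(Person("Michael", 29), Person("Andy", 30)).toDF()
inlinePeopleDF.show()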

3. Creating a DataFrame with the createDataFrame function

import org.apache.spark.sql.{Row, SparkSession}

object CreateDataFrameFun {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("Spark SQL basic example")
      .getOrCreate()
    val sc = spark.sparkContext

    // Define the schema explicitly with StructType/StructField
    import org.apache.spark.sql.types._
    val schema = StructType(List(
      StructField("integer_column", IntegerType, nullable = false),
      StructField("string_column", StringType, nullable = true),
      StructField("date_column", DateType, nullable = true)
    ))

    // Build an RDD[Row] whose values match the schema column by column
    val rdd = sc.parallelize(Seq(
      Row(1, "First Value", java.sql.Date.valueOf("2010-01-01")),
      Row(2, "Second Value", java.sql.Date.valueOf("2010-02-01"))
    ))
    val df = spark.createDataFrame(rdd, schema)

    df.foreach(x => println(x))
    spark.stop()
  }
}
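createDataFrame also has an overload that takes a Seq of case class (or tuple) instances directly and infers the schema by reflection, with no StructType needed. A minimal sketch, assuming the same SparkSession as above and reusing the Person case class from method 2:

// Schema is inferred from the case class fields
val inferredDf = spark.createDataFrame(Seq(Person("Michael", 29), Person("Andy", 30)))
inferredDf.printSchema()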