spark sql 內置函數不支持在內部寫spark sql代碼

%spark

import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.types._
import org.apache.spark.internal.Logging

val name = "zhangsan"

println(s"spark: $spark ms")

val customFunction = udf((arg1: String) => {
  //val sql = s"SELECT m.pos_code FROM ddp_pro_ods.KIWI_MYSITE m WHERE m.pos_code='96555'"
  val sql = "SELECT 'abc'"
  //val result = spark.sql(sql).as[String].limit(1).collect().headOption.getOrElse("")
  val result = spark.sql(sql)
  //println(s"spark----: $spark ms")
  name
  
},StringType)

spark.udf.register("customFunction", customFunction)

val arg1 = "A96555"
val result = spark.sql(s"SELECT customFunction('$arg1')")
result.show()


org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): org.apache.spark.SparkException: Failed to execute user defined function($anonfun$1: (string) => string)
 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章