I am trying to create a user-defined function, but when I compile with Eclipse and Maven I get the following error:

No TypeTag available for String
spark.udf.register("checkBlankorNulls_udf", checkBlankorNulls(_: String, _: String): String)
Running this code in the spark-shell does not produce any errors, but building the JAR throws the error above. Here is the code:
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("raw")
  .config("spark.sql.warehouse.dir", "C:\\tmp\\spark-warehouse")
  .getOrCreate()

import spark.implicits._
// user defined function to check null or blank values
def checkBlankorNulls(colName: String, colValue: String): String = {
  // treat null, the literal string "null", and blank/whitespace-only values as invalid
  if (colValue == null || colValue == "null" || colValue.trim.isEmpty) {
    throw new Exception(colName.concat(" field is null or empty. Partitioned fields cannot be null"))
  } else {
    colValue
  }
}
spark.udf.register("checkBlankorNulls_udf", checkBlankorNulls(_: String, _: String): String)
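
For reference, this is roughly how I intend to invoke the UDF once the JAR builds. The sample data, the events view, and the load_date column below are placeholders made up for illustration, not part of my actual job:

// Hypothetical usage sketch (placeholder view/column names):
// run the registered UDF over a tiny DataFrame exposed as a temp view.
val sample = Seq("2020-01-01", "").toDF("load_date")
sample.createOrReplaceTempView("events")

// The second row is blank, so this call is expected to fail at execution time
// with "load_date field is null or empty. Partitioned fields cannot be null".
spark.sql("SELECT checkBlankorNulls_udf('load_date', load_date) AS load_date FROM events").show()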