spark-submit --class com.HelloWorld \
--master yarn --deploy-mode client \
--executor-memory 5g /home/Hadoop-Work/HelloWorld.jar \
"/home/Hadoop-Work/application.properties" \
"Student_db,stud_info_table,ClassA,\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\",DepartmentA,120"
Here args(0) is /home/Hadoop-Work/application.properties,
and args(1) is "Student_db,stud_info_table,ClassA,\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\",DepartmentA,120"
object HelloWorld {
  def main(args: Array[String]): Unit = {
    val input = args(1)
    val splited = input.split(",")
    val dbname = splited(0)
    val tablename = splited(1)
    val classname = splited(2)
    val jsonInputColumns = splited(3) // I want the JSON to arrive here as a single field ----> "[{system:stud_info_table|student_name,actual:stud_info_table|student_phone,class_strength:50}]"
    val departmentName = splited(4)
    val kafka_timeout_sec = splited(5)
  }
}
Answer (score: 1)
Your problem is that you are splitting on a comma, and commas also occur inside the JSON. One workaround is to use a different, *uncommon* delimiter such as ;;
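To make the failure concrete, here is a minimal sketch using a simplified version of that argument (the spark-submit shell escaping is left out, and the object name is only for the demo):

object SplitDemo {
  def main(args: Array[String]): Unit = {
    // Simplified stand-in for args(1); the outer shell escaping is omitted.
    val input =
      """Student_db,stud_info_table,ClassA,[{"system":"stud_info_table|student_name","actual":"stud_info_table|student_phone","class_strength":"50"}],DepartmentA,120"""
    val splited = input.split(",")
    // The two commas inside the JSON object also act as field separators,
    // so we get 8 tokens instead of the expected 6.
    println(splited.length) // 8
    println(splited(3))     // [{"system":"stud_info_table|student_name"  -- truncated JSON
    println(splited(5))     // "class_strength":"50"}]                    -- not the kafka timeout
  }
}

I changed the code to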
object HelloWorld {
  def main(args: Array[String]): Unit = {
    val input = args(1)
    val splited = input.split(";;")
    val dbname = splited(0)
    val tablename = splited(1)
    val classname = splited(2)
    val jsonInputColumns = splited(3) // the JSON now arrives here as a single field ----> "[{system:stud_info_table|student_name,actual:stud_info_table|student_phone,class_strength:50}]"
    val departmentName = splited(4)
    val kafka_timeout_sec = splited(5)
  }
}
and change your spark-submit argument to "Student_db;;stud_info_table;;ClassA;;\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\";;DepartmentA;;120"
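As a quick sanity check, a sketch with a simplified version of the new argument (again with the shell escaping omitted, object name only for the demo) shows the fields landing where the code expects them:

object SplitDemoFixed {
  def main(args: Array[String]): Unit = {
    // Simplified stand-in for the new args(1); shell escaping omitted.
    val input =
      """Student_db;;stud_info_table;;ClassA;;[{"system":"stud_info_table|student_name","actual":"stud_info_table|student_phone","class_strength":"50"}];;DepartmentA;;120"""
    val splited = input.split(";;")
    println(splited.length) // 6 -- the commas inside the JSON no longer break the split
    println(splited(3))     // the full JSON array, ready to hand to a JSON parser
    println(splited(5))     // 120 -- kafka_timeout_sec ends up in the right slot
  }
}

The only things to watch are that ;; must never appear inside any field value, and that the whole argument stays quoted on the command line so the shell does not treat the semicolons as command separators.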