spark-submit --class com.HelloWorld \
--master yarn --deploy-mode client \
--executor-memory 5g /home/Hadoop-Work/HelloWorld.jar \
"/home/Hadoop-Work/application.properties" \
"Student_db,stud_info_table,ClassA,\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\",DepartmentA,120"
Here args(0) is /home/Hadoop-Work/application.properties and args(1) is
"Student_db,stud_info_table,ClassA,\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\",DepartmentA,120"
object HelloWorld {
  def main(args: Array[String]): Unit = {
    val input = args(1)
    val parts = input.split(",")
    val dbname = parts(0)
    val tablename = parts(1)
    val classname = parts(2)
    // here I want the whole JSON as one argument, e.g.
    // [{system:stud_info_table|student_name,actual:stud_info_table|student_phone,class_strength:50}]
    val jsonInputColumns = parts(3)
    val departmentName = parts(4)
    val kafka_timeout_sec = parts(5)
  }
}
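To see concretely why the comma split fails, here is a minimal sketch. The input string is the value of args(1) reproduced from the question, assuming the shell has already stripped the outer quotes and backslash escapes (the exact inner quoting is illustrative):

object SplitDemo {
  def main(args: Array[String]): Unit = {
    // args(1) as the driver sees it after shell unescaping (illustrative).
    val input = """Student_db,stud_info_table,ClassA,[{"system":"stud_info_table|student_name","actual":"stud_info_table|student_phone","class_strength":"50"}],DepartmentA,120"""
    val parts = input.split(",")
    parts.zipWithIndex.foreach { case (p, i) => println(s"parts($i) = $p") }
    // Prints 8 fields instead of the expected 6: the commas inside the JSON
    // scatter it across parts(3), parts(4) and parts(5), so departmentName
    // and kafka_timeout_sec would pick up JSON fragments instead of
    // DepartmentA and 120.
  }
}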
Your problem is that you are splitting on the comma separator, which also appears inside your JSON, so the split shatters the JSON itself. A workaround is to use a different separator that does not occur in the data, such as ;;.
I made it work by changing your code to
object HelloWorld {
  def main(args: Array[String]): Unit = {
    val input = args(1)
    val parts = input.split(";;")
    val dbname = parts(0)
    val tablename = parts(1)
    val classname = parts(2)
    // the JSON now arrives intact as a single field
    val jsonInputColumns = parts(3)
    val departmentName = parts(4)
    val kafka_timeout_sec = parts(5)
  }
}
and your argument string to "Student_db;;stud_info_table;;ClassA;;\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\";;DepartmentA;;120"
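As a quick sanity check, the same sketch with the separator changed to ;; now yields exactly six fields, with the JSON intact in parts(3) (again, the unescaped string is illustrative):

object SplitDemo {
  def main(args: Array[String]): Unit = {
    // args(1) after shell unescaping, now with ;; as the field separator.
    val input = """Student_db;;stud_info_table;;ClassA;;[{"system":"stud_info_table|student_name","actual":"stud_info_table|student_phone","class_strength":"50"}];;DepartmentA;;120"""
    val parts = input.split(";;")
    parts.zipWithIndex.foreach { case (p, i) => println(s"parts($i) = $p") }
    // Exactly 6 fields: the commas inside the JSON no longer matter, so
    // parts(3) holds the whole JSON document and parts(5) is "120".
  }
}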