如何将 json 格式作为 spark 提交中的一个参数传递?

how to pass json format as one argument in spark submit?

 spark-submit --class com.HelloWorld \
--master yarn --deploy-mode client \
--executor-memory 5g  /home/Hadoop-Work/HelloWorld.jar \
"/home/Hadoop-Work/application.properties" \
"Student_db,stud_info_table,ClassA,\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\",DepartmentA,120"

这里 arg(0) = "/home/Hadoop-Work/application.properties"

arg(1) = "Student_db,stud_info_table,ClassA,\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\",DepartmentA,120"

// Question code: args(1) packs six fields into one comma-separated string,
// but the JSON field itself contains commas, so splitting on "," shatters
// the JSON and splited(3) does NOT hold the whole JSON value.
object HelloWorld {

   def main(args: Array[String]){

    // args(0) is the properties-file path; args(1) is the combined field string
    val input =args(1)
    // BUG: "," also occurs inside the JSON payload, producing extra pieces
    val splited = input.split(",")
    val dbname = splited(0)
    val tablename = splited(1)
    val classname = splited(2)
    val jsonInputColumns = splited(3) // here I want json format as one argument ----> "[{system:stud_info_table|student_name,actual:stud_info_table|student_phone,class_strength:50}]"
    val departmentName = splited(4)
    val kafka_timeout_sec = splited(5)      
    }

}

你的问题是:你在逗号分隔符上拆分,而逗号同样出现在你的 JSON 中。解决方法是改用另一个*不常见*的分隔符,例如 ;;

我通过将你的代码更改为以下内容,使其正常工作:
  /** Answer code: split the combined argument on the uncommon delimiter ";;"
    * instead of ",", which also occurs inside the JSON payload, so the JSON
    * arrives intact as a single field.
    */
  object HelloWorld {

    /** Splits the combined argument into its six fields.
      *
      * @param input the ";;"-separated argument string (args(1))
      * @return (dbname, tablename, classname, jsonInputColumns, departmentName, kafkaTimeoutSec)
      * @throws IllegalArgumentException when fewer than six fields are present
      */
    def parse(input: String): (String, String, String, String, String, String) = {
      // limit = -1 keeps trailing empty fields; the zero-arg split(";;")
      // silently drops them, which would shift or lose fields.
      val fields = input.split(";;", -1)
      require(
        fields.length >= 6,
        s"expected 6 ';;'-separated fields, got ${fields.length}"
      )
      (fields(0), fields(1), fields(2), fields(3), fields(4), fields(5))
    }

    def main(args: Array[String]): Unit = {
      // args(0) is the properties-file path; args(1) carries all six fields
      val (dbname, tablename, classname, jsonInputColumns, departmentName, kafkaTimeoutSec) =
        parse(args(1))
      // jsonInputColumns now holds the JSON intact, e.g.
      // [{"system":"stud_info_table|student_name","actual":"stud_info_table|student_phone","class_strength":"50"}]
    }

  }

以及将你的参数改为:

"Student_db;;stud_info_table;;ClassA;;\"[\"{\"system\":\"stud_info_table|student_name\",\"actual\":\"stud_info_table|student_phone\",\"class_strength\":\"50\"}\"]\";;DepartmentA;;120"