在 JAAS 配置中找不到 'KafkaClient' 条目。系统属性 'java.security.auth.login.config' 未设置
Could not find a 'KafkaClient' entry in the JAAS configuration. System property 'java.security.auth.login.config' is not set
我正在尝试从 spark 结构化流连接到 Kafka。
这个有效:
spark-shell --master local[1] \
--files /mypath/jaas_mh.conf \
--packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0 \
--conf "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=jaas_mh.conf" \
--conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=jaas_mh.conf" \
--num-executors 1 --executor-cores 1
但是，当我尝试以编程方式执行相同操作时，却失败了：
// Question code (quoted verbatim): attempts to set the JAAS login config
// programmatically instead of via spark-shell --conf flags.
object SparkHelper {
// Builds a SparkConf carrying the JAAS system property for driver and
// executors, creates a SparkContext from it, then delegates to getSparkSession().
def getAndConfigureSparkSession() = {
val conf = new SparkConf()
.setAppName("Structured Streaming from Message Hub to Cassandra")
.setMaster("local[1]")
// NOTE(review): "jaas_mh.conf" is a bare relative path here, unlike the
// working spark-shell invocation which staged it with --files — verify the
// file is actually resolvable by the driver and executor JVMs.
.set("spark.driver.extraJavaOptions", "-Djava.security.auth.login.config=jaas_mh.conf")
.set("spark.executor.extraJavaOptions", "-Djava.security.auth.login.config=jaas_mh.conf")
val sc = new SparkContext(conf)
sc.setLogLevel("WARN")
getSparkSession()
}
// Builds a SparkSession. NOTE(review): the SparkConf created above is never
// passed to this builder (no .config(conf)) — the answer below identifies
// this as part of the problem.
def getSparkSession() : SparkSession = {
val spark = SparkSession
.builder()
.getOrCreate()
// Ships the JAAS file to the cluster; note the absolute path here differs
// from the relative "jaas_mh.conf" used in the JVM options above.
spark.sparkContext.addFile("/mypath/jaas_mh.conf")
// `return` is redundant in Scala; the last expression is the result.
return spark
}
}
我收到错误:
Could not find a 'KafkaClient' entry in the JAAS configuration.
System property 'java.security.auth.login.config' is not set
有什么指点吗?
即使在 conf 中,您也应该为 .conf 文件提供完整路径或相对路径。
此外,当您创建 SparkConf 时,我发现您没有将其应用于当前的 SparkSession。
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/**
 * Answer code: applies the JAAS login configuration programmatically.
 *
 * Uses an explicit `main` instead of `extends App` to avoid the App trait's
 * delayed-initialization pitfalls; the object is still runnable the same way.
 */
object Driver {
  // Full (absolute) path to the JAAS file — a bare file name would not
  // reliably resolve when set as a JVM option.
  val confPath: String = "/Users/arcizon/IdeaProjects/spark/src/main/resources/jaas_mh.conf"

  /**
   * Builds a SparkConf that sets `java.security.auth.login.config` for both
   * driver and executor JVMs, then delegates session creation.
   *
   * @return a configured SparkSession
   */
  def getAndConfigureSparkSession(): SparkSession = {
    val conf = new SparkConf()
      .setAppName("Structured Streaming from Message Hub to Cassandra")
      .setMaster("local[1]")
      .set("spark.driver.extraJavaOptions", s"-Djava.security.auth.login.config=$confPath")
      .set("spark.executor.extraJavaOptions", s"-Djava.security.auth.login.config=$confPath")
    getSparkSession(conf)
  }

  /**
   * Creates (or reuses) a SparkSession with `conf` applied, ships the JAAS
   * file to the cluster via addFile, and lowers log verbosity.
   *
   * @param conf the SparkConf to apply — the question's code omitted this step
   * @return the active SparkSession
   */
  def getSparkSession(conf: SparkConf): SparkSession = {
    val spark = SparkSession
      .builder()
      .config(conf) // crucial: apply the conf to the session being built
      .getOrCreate()
    spark.sparkContext.addFile(confPath)
    spark.sparkContext.setLogLevel("WARN")
    spark
  }

  /** Entry point: builds the session, prints both JVM-option settings, stops. */
  def main(args: Array[String]): Unit = {
    val sparkSession: SparkSession = getAndConfigureSparkSession()
    println(sparkSession.conf.get("spark.driver.extraJavaOptions"))
    println(sparkSession.conf.get("spark.executor.extraJavaOptions"))
    sparkSession.stop()
  }
}
我正在尝试从 spark 结构化流连接到 Kafka。
这个有效:
spark-shell --master local[1] \
--files /mypath/jaas_mh.conf \
--packages org.apache.spark:spark-sql-kafka-0-10_2.11:2.3.0 \
--conf "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=jaas_mh.conf" \
--conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=jaas_mh.conf" \
--num-executors 1 --executor-cores 1
但是，当我尝试以编程方式执行相同操作时，却失败了：
// Question code (quoted verbatim): attempts to set the JAAS login config
// programmatically instead of via spark-shell --conf flags.
object SparkHelper {
// Builds a SparkConf carrying the JAAS system property for driver and
// executors, creates a SparkContext from it, then delegates to getSparkSession().
def getAndConfigureSparkSession() = {
val conf = new SparkConf()
.setAppName("Structured Streaming from Message Hub to Cassandra")
.setMaster("local[1]")
// NOTE(review): "jaas_mh.conf" is a bare relative path here, unlike the
// working spark-shell invocation which staged it with --files — verify the
// file is actually resolvable by the driver and executor JVMs.
.set("spark.driver.extraJavaOptions", "-Djava.security.auth.login.config=jaas_mh.conf")
.set("spark.executor.extraJavaOptions", "-Djava.security.auth.login.config=jaas_mh.conf")
val sc = new SparkContext(conf)
sc.setLogLevel("WARN")
getSparkSession()
}
// Builds a SparkSession. NOTE(review): the SparkConf created above is never
// passed to this builder (no .config(conf)) — the answer below identifies
// this as part of the problem.
def getSparkSession() : SparkSession = {
val spark = SparkSession
.builder()
.getOrCreate()
// Ships the JAAS file to the cluster; note the absolute path here differs
// from the relative "jaas_mh.conf" used in the JVM options above.
spark.sparkContext.addFile("/mypath/jaas_mh.conf")
// `return` is redundant in Scala; the last expression is the result.
return spark
}
}
我收到错误:
Could not find a 'KafkaClient' entry in the JAAS configuration.
System property 'java.security.auth.login.config' is not set
有什么指点吗?
即使在 conf 中,您也应该为 .conf 文件提供完整路径或相对路径。 此外,当您创建 SparkConf 时,我发现您没有将其应用于当前的 SparkSession。
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/**
 * Answer code: applies the JAAS login configuration programmatically by
 * setting `java.security.auth.login.config` on both driver and executors
 * and by applying the SparkConf to the session builder.
 */
object Driver extends App {
  // Full (absolute) path to the JAAS file used for the Kafka client login.
  val confPath: String = "/Users/arcizon/IdeaProjects/spark/src/main/resources/jaas_mh.conf"

  /** Assembles the SparkConf with the JAAS JVM option and builds the session. */
  def getAndConfigureSparkSession(): SparkSession = {
    val jaasOption = s"-Djava.security.auth.login.config=$confPath"
    val sparkConf = new SparkConf()
    sparkConf.setAppName("Structured Streaming from Message Hub to Cassandra")
    sparkConf.setMaster("local[1]")
    sparkConf.set("spark.driver.extraJavaOptions", jaasOption)
    sparkConf.set("spark.executor.extraJavaOptions", jaasOption)
    getSparkSession(sparkConf)
  }

  /** Builds (or reuses) a SparkSession with `conf` applied, distributes the
   *  JAAS file to the cluster, and reduces log noise. */
  def getSparkSession(conf: SparkConf): SparkSession = {
    val builder = SparkSession.builder().config(conf)
    val session = builder.getOrCreate()
    session.sparkContext.addFile(confPath)
    session.sparkContext.setLogLevel("WARN")
    session
  }

  // Build the session, show the two JVM-option settings, then shut down.
  val sparkSession: SparkSession = getAndConfigureSparkSession()
  println(sparkSession.conf.get("spark.driver.extraJavaOptions"))
  println(sparkSession.conf.get("spark.executor.extraJavaOptions"))
  sparkSession.stop()
}