Scala: Use implicits with a Function interface
In Scala you can write a function like this:
object Add extends ((Int, Int) => Int) {
  def apply(a: Int, b: Int) = a + b
}
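This works because Add implements Function2[Int, Int, Int], so it can be called and passed around like any other function value, for example:

Add(1, 2)                  // 3, via Function2.apply
List(1, 2, 3).reduce(Add)  // 6, Add used as a plain (Int, Int) => Int value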
I want to write a function like the one above, but I also want to use an implicit. Something like:
object DoSomething extends (Configuration, ??? => DataFrame) {
  override def apply(config: Configuration)(implicit sparkSession: SparkSession): DataFrame = {
    ...
  }
}
Does anyone know how I could do this?
Edit:
object DoSomething extends (Configuration => SparkSession) {
  override def apply(config: Configuration)(implicit sparkSession: SparkSession): DataFrame = {
    val bootstrapServers = config.bootstrapServers
    val topic = config.topic
    sparkSession.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", bootstrapServers)
      .option("subscribe", topic)
      .load()
  }
}
Attempt:
class DoSomething(implicit sparkSession: SparkSession) extends (Configuration => DataFrame) {
  override def apply(config: Configuration): DataFrame = {
    val bootstrapServers = config.bootstrapServers
    val topic = config.topic
    sparkSession.readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", bootstrapServers)
      .option("subscribe", topic)
      .load()
  }
}
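For context, this class-based version only needs a SparkSession in implicit scope at the point where the instance is created; a rough usage sketch (the builder settings and the config value below are placeholders, not from the code above):

import org.apache.spark.sql.{DataFrame, SparkSession}

implicit val spark: SparkSession = SparkSession.builder()
  .appName("do-something")   // placeholder app name
  .master("local[*]")        // placeholder: run locally for illustration
  .getOrCreate()

val doSomething = new DoSomething        // the implicit SparkSession is captured here
val config: Configuration = ???          // some Configuration instance, details omitted
val df: DataFrame = doSomething(config)  // plain Function1 apply

Moving the implicit to the constructor keeps apply's signature identical to Function1's, which is why this variant can compile, whereas the two-parameter-list apply in the edit above does not actually override Function1.apply.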