java.lang.NoSuchMethodError: com.facebook.fb303.FacebookService$Client.sendBaseOneway(Ljava/lang/String;Lorg/apache/thrift/TBase;)V

I have the following code:

  import java.io.File
  import org.apache.spark.sql.SparkSession

  val warehouseLocation = new File("spark-warehouse").getAbsolutePath
  implicit val spark = SparkSession
    .builder
    .appName("test")
    .config("spark.sql.warehouse.dir", warehouseLocation)
    .config("hive.execution.engine", "spark")
    .enableHiveSupport()
    .getOrCreate

  spark.sql("CREATE EXTERNAL TABLE IF NOT EXISTS person(name string) STORED AS PARQUET LOCATION '/user/my_user/data/my_data.parquet'")

I get the following error:

 client token: Token { kind: YARN_CLIENT_TOKEN, service:  }
 diagnostics: User class threw exception: java.lang.NoSuchMethodError: com.facebook.fb303.FacebookService$Client.sendBaseOneway(Ljava/lang/String;Lorg/apache/thrift/TBase;)V
at com.facebook.fb303.FacebookService$Client.send_shutdown(FacebookService.java:436)
at com.facebook.fb303.FacebookService$Client.shutdown(FacebookService.java:430)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.close(HiveMetaStoreClient.java:492)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
at com.sun.proxy.$Proxy33.close(Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.close(Hive.java:291)
at org.apache.hadoop.hive.ql.metadata.Hive.access$000(Hive.java:137)
at org.apache.hadoop.hive.ql.metadata.Hive.remove(Hive.java:157)
at org.apache.hadoop.hive.ql.metadata.Hive.closeCurrent(Hive.java:261)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:231)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:208)
at org.apache.hadoop.hive.ql.session.SessionState.setAuthorizerV2Config(SessionState.java:768)
at org.apache.hadoop.hive.ql.session.SessionState.setupAuth(SessionState.java:739)
at org.apache.hadoop.hive.ql.session.SessionState.getAuthenticator(SessionState.java:1394)
at org.apache.hadoop.hive.ql.session.SessionState.getUserFromAuthenticator(SessionState.java:987)
at org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(Table.java:177)
at org.apache.hadoop.hive.ql.metadata.Table.<init>(Table.java:119)
at org.apache.spark.sql.hive.client.HiveClientImpl$.toHiveTable(HiveClientImpl.scala:898)
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable.apply$mcV$sp(HiveClientImpl.scala:470)
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable.apply(HiveClientImpl.scala:468)
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$createTable.apply(HiveClientImpl.scala:468)
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState.apply(HiveClientImpl.scala:274)
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1(HiveClientImpl.scala:212)
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:211)
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:257)
at org.apache.spark.sql.hive.client.HiveClientImpl.createTable(HiveClientImpl.scala:468)
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doCreateTable.apply$mcV$sp(HiveExternalCatalog.scala:258)
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doCreateTable.apply(HiveExternalCatalog.scala:216)
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doCreateTable.apply(HiveExternalCatalog.scala:216)
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
at org.apache.spark.sql.hive.HiveExternalCatalog.doCreateTable(HiveExternalCatalog.scala:216)
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.createTable(ExternalCatalog.scala:119)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createTable(SessionCatalog.scala:307)
at org.apache.spark.sql.execution.command.CreateTableCommand.run(tables.scala:128)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
at org.apache.spark.sql.Dataset$$anonfun.apply(Dataset.scala:190)
at org.apache.spark.sql.Dataset$$anonfun.apply(Dataset.scala:190)
at org.apache.spark.sql.Dataset$$anonfun.apply(Dataset.scala:3253)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638)
at fr.enedis.ctd.TestHive2$.delayedEndpoint$fr$enedis$ctd$TestHive2(TestHive2.scala:23)
at fr.enedis.ctd.TestHive2$delayedInit$body.apply(TestHive2.scala:6)
at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main.apply(App.scala:76)
at scala.App$$anonfun$main.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at fr.enedis.ctd.TestHive2$.main(TestHive2.scala:6)
at fr.enedis.ctd.TestHive2.main(TestHive2.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon.run(ApplicationMaster.scala:721)
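
A `NoSuchMethodError` like this usually means two incompatible versions of the same library end up on the classpath (here, the fb303/thrift classes used by the Hive metastore client). One way to confirm which jar the offending class is actually loaded from is to print its code source at runtime; a minimal sketch using standard JDK reflection:

  // Prints the jar that FacebookService$Client was loaded from, which reveals
  // the libfb303 version actually present on the runtime classpath.
  val clazz = Class.forName("com.facebook.fb303.FacebookService$Client")
  println(clazz.getProtectionDomain.getCodeSource.getLocation)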

Here are my dependencies:

  "org.apache.spark" %% "spark-core" % "2.3.2"% "provided",
  "org.apache.spark" %% "spark-sql" % "2.3.2"% "provided",
  "org.apache.phoenix" % "phoenix-core" % "4.7.0.2.6.5.102-5",
  "org.apache.phoenix" % "phoenix-spark2" % "4.7.0.2.6.5.0-292"

I tried adding this, but it did not work: "org.apache.thrift" % "libfb303" % "0.9.3"

Adding this solved my problem: "org.apache.thrift" % "libfb303" % "0.9.2"
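
For reference, a sketch of what the resulting build.sbt fragment looks like (assuming sbt; the `dependencyOverrides` line is an optional extra safeguard, not something I verified was needed, in case a transitive dependency such as Phoenix still pulls in a different libfb303):

  // build.sbt sketch: pin libfb303 to the Hive-compatible 0.9.2
  libraryDependencies ++= Seq(
    "org.apache.spark" %% "spark-core" % "2.3.2" % "provided",
    "org.apache.spark" %% "spark-sql" % "2.3.2" % "provided",
    "org.apache.phoenix" % "phoenix-core" % "4.7.0.2.6.5.102-5",
    "org.apache.phoenix" % "phoenix-spark2" % "4.7.0.2.6.5.0-292",
    "org.apache.thrift" % "libfb303" % "0.9.2"
  )

  // Optional: force 0.9.2 even if another dependency drags in 0.9.3 transitively
  dependencyOverrides += "org.apache.thrift" % "libfb303" % "0.9.2"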