Spark 读取 TiDB 时报错:

[ERROR] 2022-01-29 11:00:29.644 - [taskAppId=TASK-1423-5077852-6176558]:[244] - execute sql error
java.sql.SQLException: java.util.concurrent.ExecutionException: java.lang.RuntimeException: java.lang.IllegalArgumentException: invalid encoded hash data key prefix: m
at com.pingcap.com.google.common.base.Preconditions.checkArgument(Preconditions.java:141)
at com.pingcap.tikv.codec.MetaCodec.decodeHashDataKey(MetaCodec.java:70)
at com.pingcap.tikv.codec.MetaCodec.hashGetFields(MetaCodec.java:121)
at com.pingcap.tikv.catalog.CatalogTransaction.getDatabases(CatalogTransaction.java:73)
at com.pingcap.tikv.catalog.Catalog$CatalogCache.loadDatabases(Catalog.java:197)
at com.pingcap.tikv.catalog.Catalog$CatalogCache.(Catalog.java:138)
at com.pingcap.tikv.catalog.Catalog$CatalogCache.(Catalog.java:126)
at com.pingcap.tikv.catalog.Catalog.(Catalog.java:46)
at com.pingcap.tikv.TiSession.getCatalog(TiSession.java:135)
at com.pingcap.tispark.statistics.StatisticsManager$.initialize(StatisticsManager.scala:275)
at com.pingcap.tispark.statistics.StatisticsManager$.initStatisticsManager(StatisticsManager.scala:266)
at org.apache.spark.sql.TiContext.(TiContext.scala:64)
at org.apache.spark.sql.TiExtensions.getOrCreateTiContext(TiExtensions.scala:42)
at org.apache.spark.sql.TiExtensions$$anonfun$apply$5.apply(TiExtensions.scala:31)
at org.apache.spark.sql.TiExtensions$$anonfun$apply$5.apply(TiExtensions.scala:31)
at org.apache.spark.sql.extensions.TiResolutionRule.(rules.scala:37)
at org.apache.spark.sql.TiExtensions$$anonfun$apply$6.apply(TiExtensions.scala:31)
at org.apache.spark.sql.TiExtensions$$anonfun$apply$6.apply(TiExtensions.scala:31)
at org.apache.spark.sql.SparkSessionExtensions$$anonfun$buildResolutionRules$1.apply(SparkSessionExtensions.scala:78)
at org.apache.spark.sql.SparkSessionExtensions$$anonfun$buildResolutionRules$1.apply(SparkSessionExtensions.scala:78)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at org.apache.spark.sql.SparkSessionExtensions.buildResolutionRules(SparkSessionExtensions.scala:78)
at org.apache.spark.sql.internal.BaseSessionStateBuilder.customResolutionRules(BaseSessionStateBuilder.scala:183)
at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anon$1.(HiveSessionStateBuilder.scala:74)
at org.apache.spark.sql.hive.HiveSessionStateBuilder.analyzer(HiveSessionStateBuilder.scala:69)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
at org.apache.livy.thriftserver.session.SqlJob.executeSql(SqlJob.java:72)
at org.apache.livy.thriftserver.session.SqlJob.call(SqlJob.java:62)
at org.apache.livy.thriftserver.session.SqlJob.call(SqlJob.java:33)
at org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:64)
at org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:31)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)

at org.apache.hive.jdbc.HiveStatement.waitForOperationToComplete(HiveStatement.java:348)
at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:251)
at org.apache.hive.jdbc.HiveStatement.executeUpdate(HiveStatement.java:448)
at org.apache.hive.jdbc.HivePreparedStatement.executeUpdate(HivePreparedStatement.java:119)
at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.executeFuncAndSql(SqlTask.java:238)
at org.apache.dolphinscheduler.server.worker.task.sql.SqlTask.handle(SqlTask.java:140)
at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:133)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)

若提问为性能优化、故障排查类问题,请下载脚本运行。终端输出的打印结果,请务必全选并复制粘贴上传。

java.lang.RuntimeException: java.lang.IllegalArgumentException: invalid encoded hash data key prefix: m
at com.pingcap.com.google.common.base.Preconditions.checkArgument(Preconditions.java:141)
at com.pingcap.tikv.codec.MetaCodec.decodeHashDataKey(MetaCodec.java:70)

从日志看,`invalid encoded hash data key prefix: m` 这个报错发生在 TiSpark 解码 TiDB 元数据(MetaCodec.decodeHashDataKey)时,通常意味着 TiSpark 版本与 TiDB 集群版本不兼容,而不是某个数据类型不受支持。你用的是什么环境?TiSpark、Spark 和 TiDB 各是什么版本?能否与官网提供的版本兼容性列表匹配上?

1赞