TiSpark: Error scanning data from region

Spark version: 2.3
TiSpark jar: tispark-core-2.1.8-spark_2.3-jar-with-dependencies.jar

Running spark-sql keeps throwing the error below; even a simple `show databases` fails.
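For reference, TiSpark 2.x plugs in through Spark session extensions. A minimal sketch of this kind of launch (the jar path and PD address are placeholders, not values from this post):

```sh
# Sketch only: TiSpark 2.1.8 on Spark 2.3; <pd-host> is a placeholder.
spark-sql \
  --jars /path/to/tispark-core-2.1.8-spark_2.3-jar-with-dependencies.jar \
  --conf spark.sql.extensions=org.apache.spark.sql.TiExtensions \
  --conf spark.tispark.pd.addresses=<pd-host>:2379
```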

Looking into it, please wait.

Could you provide the version of your TiDB cluster?

v3.0.6

OK. Could you send the complete log? I expect there is a more detailed error message further down.

This is the complete execution log:

```
20/04/26 16:51:57 WARN KVErrorHandler: Failed to send notification back to driver since CacheInvalidateCallBack is null in executor node.
20/04/26 16:49:08 ERROR SparkSQLDriver: Failed in [show databases]
com.pingcap.tikv.exception.TiClientInternalException: Error scanning data from region.
	at com.pingcap.tikv.operation.iterator.ScanIterator.loadCache(ScanIterator.java:94)
	at com.pingcap.tikv.operation.iterator.ScanIterator.hasNext(ScanIterator.java:105)
	at com.pingcap.tikv.catalog.CatalogTransaction.hashGetFields(CatalogTransaction.java:115)
	at com.pingcap.tikv.catalog.CatalogTransaction.getDatabases(CatalogTransaction.java:140)
	at com.pingcap.tikv.catalog.Catalog$CatalogCache.loadDatabases(Catalog.java:112)
	at com.pingcap.tikv.catalog.Catalog$CatalogCache.<init>(Catalog.java:49)
	at com.pingcap.tikv.catalog.Catalog$CatalogCache.<init>(Catalog.java:44)
	at com.pingcap.tikv.catalog.Catalog.<init>(Catalog.java:129)
	at com.pingcap.tikv.TiSession.getCatalog(TiSession.java:93)
	at org.apache.spark.sql.TiContext.<init>(TiContext.scala:44)
	at org.apache.spark.sql.TiExtensions.getOrCreateTiContext(TiExtensions.scala:15)
	at org.apache.spark.sql.TiExtensions$$anonfun$apply$5.apply(TiExtensions.scala:24)
	at org.apache.spark.sql.TiExtensions$$anonfun$apply$5.apply(TiExtensions.scala:24)
	at org.apache.spark.sql.extensions.TiResolutionRule.<init>(rules.scala:31)
	at org.apache.spark.sql.TiExtensions$$anonfun$apply$6.apply(TiExtensions.scala:24)
	at org.apache.spark.sql.TiExtensions$$anonfun$apply$6.apply(TiExtensions.scala:24)
	at org.apache.spark.sql.SparkSessionExtensions$$anonfun$buildResolutionRules$1.apply(SparkSessionExtensions.scala:75)
	at org.apache.spark.sql.SparkSessionExtensions$$anonfun$buildResolutionRules$1.apply(SparkSessionExtensions.scala:75)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.SparkSessionExtensions.buildResolutionRules(SparkSessionExtensions.scala:75)
	at org.apache.spark.sql.internal.BaseSessionStateBuilder.customResolutionRules(BaseSessionStateBuilder.scala:183)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anon$1.<init>(HiveSessionStateBuilder.scala:73)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.analyzer(HiveSessionStateBuilder.scala:68)
	at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
	at org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
	at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
	at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
	at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
	at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638)
	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:694)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:62)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:364)
	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:272)
	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:906)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:197)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:227)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:136)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: com.pingcap.tikv.exception.TiClientInternalException: ScanResponse failed without a cause
	at com.pingcap.tikv.region.RegionStoreClient.isScanSuccess(RegionStoreClient.java:287)
	at com.pingcap.tikv.region.RegionStoreClient.scan(RegionStoreClient.java:278)
	at com.pingcap.tikv.region.RegionStoreClient.scan(RegionStoreClient.java:344)
	at com.pingcap.tikv.operation.iterator.ScanIterator.loadCache(ScanIterator.java:73)
	... 52 more
```

If convenient, please send the complete log file. I need the information after the "... 52 more".

I ran it with spark-sql, and there is nothing after the "... 52 more".

With debug enabled, this is the log I get.
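Debug was turned on through `conf/log4j.properties`. A minimal sketch, assuming the stock log4j 1.x template shipped with Spark 2.3 (the `com.pingcap` logger name is an assumption based on the TiKV client's package prefix):

```sh
# Assumes the default Spark 2.3 log4j 1.x template.
cp $SPARK_HOME/conf/log4j.properties.template $SPARK_HOME/conf/log4j.properties
# Raise verbosity for the TiKV Java client (package prefix assumed).
echo 'log4j.logger.com.pingcap=DEBUG' >> $SPARK_HOME/conf/log4j.properties
```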

The logs are normally under Spark's log directory; the file names carry the startup timestamp. Take a look there.

I ran it with spark-sql, so the log was printed directly to the console:

```sh
spark-sql --master yarn-client --driver-memory 4G --executor-memory 5G --num-executors 5 --executor-cores 5
```

The Spark log directory only contains the Thrift server logs, not the execution logs.
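If the console output is truncated, the complete application log can usually be pulled back from YARN after the run (a sketch, assuming YARN log aggregation is enabled; the application id is a placeholder):

```sh
# In yarn-client mode the driver prints to the console; executor logs
# stay on the YARN nodes until aggregated.
yarn logs -applicationId <application_id>
```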

It worked before when TiDB was deployed on Huawei Cloud; after moving to Alibaba Cloud it stopped working.

From the Spark servers, the network to PD on port 2379 was reachable, but the connection to TiKV on port 20160 was not. Opening that port solved the problem. Thanks to the experts for the detailed answers, and kudos for your work.
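For anyone who hits the same issue: TiSpark executors read region data from TiKV directly, so every Spark node needs network access to the TiKV service port (20160 by default) in addition to the PD port (2379). A quick connectivity check from a Spark node (host names are placeholders):

```sh
# PD client port: this was already reachable.
nc -zv <pd-host> 2379
# TiKV service port: this was blocked until the port was opened.
nc -zv <tikv-host> 20160
```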


Thanks for the feedback! :+1:
