spark初始化搞不定

【是否原创】是 or 否
【首发渠道】TiDB 社区 or 其他平台
【首发渠道链接】其他平台首发请附上对应链接
【正文】

// Build the Spark configuration for TiSpark: registers TiExtensions and points
// at the PD / TiDB endpoints. setIfMissing means any value supplied via
// spark-submit / spark-defaults takes precedence over these hard-coded ones.
// NOTE(review): pd.addresses uses port 32107 while tidb.port is 30044 —
// presumably NodePort mappings; verify both actually reach the cluster,
// since the stack trace below fails exactly at PD client initialization.
val sparkConf = new SparkConf() .setIfMissing("spark.tispark.write.allow_spark_sql", "true") .setIfMissing("spark.master", "local[*]") .setIfMissing("spark.app.name", getClass.getName) .setIfMissing("spark.sql.extensions", "org.apache.spark.sql.TiExtensions") .setIfMissing("spark.tispark.pd.addresses", "192.168.98.63:32107") .setIfMissing("spark.tispark.tidb.addr", "192.168.98.63") .setIfMissing("spark.tispark.tidb.port", "30044") //.setIfMissing("spark.tispark.write.without_lock_table", "true")
// Create (or reuse) the session with the TiSpark config applied.
val spark = SparkSession.builder.config(sparkConf).getOrCreate()
val sqlContext = spark.sqlContext

//readUsingScala(sqlContext)
// NOTE(review): the pasted stack trace's deepest user frame is
// writeUsingScala (TiDataSourceExampleWithExtensions.scala:57), even though
// the post says readUsingScala was run — confirm which call was active.
writeUsingScala(sqlContext)
//useAnotherTiDB(sqlContext)

}

/**
 * Reads the `chenxinhui.student` table from TiDB through the TiSpark
 * "tidb" data source and prints it with `df.show()`.
 *
 * @param sqlContext the SQLContext of a SparkSession configured with
 *                   TiExtensions and `spark.tispark.pd.addresses`
 */
def readUsingScala(sqlContext: SQLContext): Unit = {
  // TiDB connection options; any key omitted here falls back to the
  // corresponding spark.tispark.* value in the Spark config.
  // (Fixed: the original paste used Unicode curly quotes “…”, which are not
  // valid Scala string delimiters and would not compile.)
  val tidbOptions: Map[String, String] = Map(
    "tidb.addr" -> "192.168.98.63",
    "tidb.port" -> "30044",
    "tidb.user" -> "root",
    "tidb.password" -> ""
  )
  val df = sqlContext.read
    .format("tidb")
    .options(tidbOptions)
    .option("database", "chenxinhui")
    .option("table", "student")
    .load()
  df.show()
}

报错

Exception in thread "main" java.lang.NullPointerException: Failed to init client for PD cluster.
at com.pingcap.com.google.common.base.Preconditions.checkNotNull(Preconditions.java:906)
at com.pingcap.tikv.PDClient.initCluster(PDClient.java:500)
at com.pingcap.tikv.PDClient.&lt;init&gt;(PDClient.java:100)
at com.pingcap.tikv.PDClient.createRaw(PDClient.java:110)
at com.pingcap.tikv.TiSession.getPDClient(TiSession.java:128)
at com.pingcap.tikv.TiSession.getTimestamp(TiSession.java:112)
at com.pingcap.tikv.TiSession.createSnapshot(TiSession.java:116)
at com.pingcap.tispark.statistics.StatisticsManager$.initialize(StatisticsManager.scala:274)
at com.pingcap.tispark.statistics.StatisticsManager$.initStatisticsManager(StatisticsManager.scala:266)
at org.apache.spark.sql.TiContext.&lt;init&gt;(TiContext.scala:63)
at org.apache.spark.sql.TiExtensions.getOrCreateTiContext(TiExtensions.scala:47)
at org.apache.spark.sql.TiExtensions.$anonfun$apply$4(TiExtensions.scala:37)
at org.apache.spark.sql.TiExtensions$.getTiContext(TiExtensions.scala:74)
at com.pingcap.tispark.TiDBDataSource.createRelation(TiDBDataSource.scala:71)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:354)
at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:326)
at org.apache.spark.sql.DataFrameReader.$anonfun$load$3(DataFrameReader.scala:308)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:308)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:226)
at TiDataSourceExampleWithExtensions$.writeUsingScala(TiDataSourceExampleWithExtensions.scala:57)
at TiDataSourceExampleWithExtensions$.main(TiDataSourceExampleWithExtensions.scala:23)
at TiDataSourceExampleWithExtensions.main(TiDataSourceExampleWithExtensions.scala)

我运行的是readUsingScala(sqlContext)

为啥这么久了。才提示出来:joy::joy:

此话题已在最后回复的 1 分钟后被自动关闭。不再允许新回复。