.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:160)
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:279)
... 1 more
21/09/03 15:18:56 INFO SparkContext: Invoking stop() from shutdown hook
igyu
k.sql.Row]
})(Encoders.javaSerialization(Row.getClass))
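A minimal sketch of the usual alternative, assuming Spark 2.4+: instead of Encoders.javaSerialization(Row.getClass), which produces a single binary column and loses the Row structure, pass an explicit RowEncoder built from the output schema (the sample data below is invented):

    import org.apache.spark.sql.{Row, SparkSession}
    import org.apache.spark.sql.catalyst.encoders.RowEncoder
    import org.apache.spark.sql.types._

    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    val schema = StructType(Seq(StructField("name", StringType, nullable = true)))
    val df = spark.createDataFrame(
      spark.sparkContext.parallelize(Seq(Row(" a "), Row(" b "))), schema)

    // map over Rows with an explicit Row encoder that keeps the column structure
    val trimmed = df.map(r => Row(r.getString(0).trim))(RowEncoder(schema))
    trimmed.show()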
igyu
bmit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:926)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:935)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
21/08/23 11:40:29 INFO util.ShutdownHookManager: Shutdown hook called
igyu
d2d/config.json (No such file or directory), but
igyu
ableName)))
The path given to hbaseBulkLoad and to LoadIncrementalHFiles is the same:
stagingFolder.getPath
but hbaseBulkLoad seems to expect a local file.
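A rough sketch of one way to keep both steps on the same filesystem, assuming the hbase-spark HBaseContext API and HBase 2.x (hbaseContext, rdd, tableName, rowToKeyValues and the staging URI are placeholders): pass a fully qualified HDFS URI as the staging folder, so hbaseBulkLoad writes the HFiles exactly where LoadIncrementalHFiles will look for them.

    import org.apache.hadoop.fs.Path
    import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
    import org.apache.hadoop.hbase.client.ConnectionFactory
    import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles  // org.apache.hadoop.hbase.mapreduce.* on HBase 1.x

    val stagingDir = "hdfs://nameservice1/tmp/hbase-staging"   // hypothetical HDFS URI, not a local path

    // step 1: write HFiles into the HDFS staging folder (rowToKeyValues stands in for
    // the (KeyFamilyQualifier, value) flat-map that bulkLoad expects)
    hbaseContext.bulkLoad(rdd, TableName.valueOf(tableName), rowToKeyValues, stagingDir)

    // step 2: hand the *same* HDFS URI to LoadIncrementalHFiles
    val conf = HBaseConfiguration.create()
    val conn = ConnectionFactory.createConnection(conf)
    val tn = TableName.valueOf(tableName)
    new LoadIncrementalHFiles(conf).doBulkLoad(
      new Path(stagingDir), conn.getAdmin, conn.getTable(tn), conn.getRegionLocator(tn))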
igyu
HandlerContext.java:747)
at org.apache.hbase.thirdparty.io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:801)
at org.apache.hbase.thirdparty.io.netty.channel.DefaultChannelPipeline.writeAndFlush(DefaultChannelPipeline.java:1036)
at org.apache.hbase.thirdparty.io.netty.channel.AbstractChannel.writeAndFlush(AbstractChannel.java:305)
... 9 more
igyu
")
proper.setProperty("hive.metastore.warehouse.dir","/user/hive/warehouse")
proper.setProperty("hive.metastore.uris", "thrift://bigdser1:9083")
When I use
sparkSession.sparkContext.hadoopConfiguration.addResource("D:\\file\\core-site.xml")
sparkSession.sparkContext.hadoopConfiguration.addResource("D:\\file\\hdfs-site.xml")
sparkSession.sparkContext.hadoopConfiguration.addResource("D:\\file\\hive-site.xml")
sparkSession.sparkContext.hadoopConfiguration.addResource("D:\\file\\yarn-site.xml")
I also get the same error.
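Two things worth checking here, as a sketch under assumptions (the metastore URI and warehouse dir are taken from the post): Hadoop's Configuration.addResource(String) looks the name up on the classpath, so a Windows file path passed as a String is silently ignored and a file needs addResource(new Path(...)); and the warehouse/metastore settings have to reach the builder before the first SparkSession is created.

    import org.apache.hadoop.fs.Path
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .config("hive.metastore.uris", "thrift://bigdser1:9083")
      .enableHiveSupport()
      .getOrCreate()

    // addResource(String) resolves against the classpath; use a Path for local files
    spark.sparkContext.hadoopConfiguration.addResource(new Path("D:\\file\\core-site.xml"))
    spark.sparkContext.hadoopConfiguration.addResource(new Path("D:\\file\\hdfs-site.xml"))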
igyu
dir ('null') to the value of spark.sql.warehouse.dir ('file:/D:/file/code/Java/jztsynctools/spark-warehouse/').
I think hive.metastore.warehouse.dir is null, so I can't find the hivetest database,
but I have set:
proper.setProperty("spark.sql.warehouse.dir", "/user/hive/warehouse")
proper.setProperty("hive.metastore.warehouse.dir","/user/hive/warehouse")
proper.setProperty("hive.metastore.uris", "thrift://bigdser1:9083")
igyu
(Dataset.scala:713)
at com.join.hive.reader.HiveReader.readFrom(HiveReader.scala:15)
at com.join.Synctool$.main(Synctool.scala:200)
at com.join.Synctool.main(Synctool.scala)
but I use LdapGroupsMapping.
How can I fix it?
igyu
la:99)
at org.apache.spark.sql.hive.HiveExternalCatalog.getTable(HiveExternalCatalog.scala:736)
at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.getTable(ExternalCatalogWithListener.scala:146)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.lookupRelation(SessionCatalog.scala:701)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$lookupTableFromCatalog(Analyzer.scala:730)
igyu
ion.datasources.FileFormatWriter$.org$apache$spark$sql$execution$datasources$FileFormatWriter$$executeTask(FileFormatWriter.scala:248)
... 10 more
21/08/10 08:33:44 INFO SparkContext: Invoking stop() from shutdown hook
igyu
4 INFO SparkContext: Invoking stop() from shutdown hook
igyu
.Dataset.show(Dataset.scala:713)
at com.join.ftp.reader.FtpReader.readFrom(FtpReader.scala:40)
at com.join.synctool$.main(synctool.scala:41)
at com.join.synctool.main(synctool.scala)
21/08/09 11:15:08 INFO SparkContext: Invoking stop() from shutdown hook
igyu
new StructField("id", DataTypes.StringType, true),
new StructField("name", DataTypes.StringType, true),
new StructField("year", DataTypes.IntegerType, true),
new StructField("city", DataTypes.StringType, true)))
val DF = spark.createDataFrame(value,schemas)
How can I call createDataFrame correctly?
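A minimal self-contained sketch of one way to build it, assuming Scala and an RDD[Row] whose values line up with the schema (the sample rows are invented):

    import org.apache.spark.sql.{Row, SparkSession}
    import org.apache.spark.sql.types._

    val spark = SparkSession.builder().master("local[*]").getOrCreate()

    val schemas = StructType(Seq(
      StructField("id", StringType, nullable = true),
      StructField("name", StringType, nullable = true),
      StructField("year", IntegerType, nullable = true),
      StructField("city", StringType, nullable = true)))

    // each Row must match the schema positionally and by type ("year" is an Int)
    val value = spark.sparkContext.parallelize(Seq(
      Row("1", "alice", 2020, "beijing"),
      Row("2", "bob", 2021, "shanghai")))

    val DF = spark.createDataFrame(value, schemas)
    DF.show()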
igyu
la:99)
at org.apache.spark.sql.hive.HiveExternalCatalog.getTable(HiveExternalCatalog.scala:736)
at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.getTable(ExternalCatalogWithListener.scala:146)
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.lookupRelation(SessionCatalog.scala:701)
at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$lookupTableFromCatalog(Analyzer.scala:730)
igyu
.ParseUtils.parse(ParseUtils.java:68)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:564)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1425)
at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1398)
at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:205)
... 15 more
21/07/30 17:16:39 INFO SparkContext: Invoking stop() from shutdown hook
21/07/30 17:16:39 INFO SparkUI: Stopped Spark web UI at http://WIN-20201231YGA:4040
igyu
I want to read data from Hive cluster1
and write it to Hive cluster2.
How can I do this?
Note: cluster1 and cluster2 both have Kerberos enabled.
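A rough sketch of one common approach, under stated assumptions: the driver logs in from a keytab, cluster1's hive-site.xml is on the classpath so reads resolve through its metastore, and the writes go straight to cluster2's HDFS. The principal, keytab path, nameservice and table names below are all hypothetical, and on Spark 3.x the token config key is spark.kerberos.access.hadoopFileSystems.

    import org.apache.hadoop.security.UserGroupInformation
    import org.apache.spark.sql.SparkSession

    // Kerberos login for the driver
    UserGroupInformation.loginUserFromKeytab("igyu@EXAMPLE.COM", "/path/to/igyu.keytab")

    val spark = SparkSession.builder()
      .appName("hive-to-hive")
      // ask YARN to fetch delegation tokens for cluster2's HDFS as well (Spark 2.x key)
      .config("spark.yarn.access.hadoopFileSystems", "hdfs://cluster2-ns")
      .enableHiveSupport()
      .getOrCreate()

    // read via cluster1's metastore, write files into cluster2's HDFS; a CREATE EXTERNAL
    // TABLE ... LOCATION ... run against cluster2's metastore then registers the data
    val df = spark.read.table("hivetest.some_table")
    df.write.mode("overwrite").parquet("hdfs://cluster2-ns/tmp/hivetest/some_table")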
igyu