org.apache.hadoop.hive.metastore.api.SerDeInfo; local class incompatible
完整報錯如下:
org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:116)
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:78)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:224)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:217)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:208)
    at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:610)
    at org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:89)
    at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:419)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:286)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:201)
    at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:154)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
    at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
    at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
    at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
    at akka.actor.Actor.aroundReceive(Actor.scala:517)
    at akka.actor.Actor.aroundReceive$(Actor.scala:515)
    at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
    at akka.actor.ActorCell.invoke(ActorCell.scala:561)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
    at akka.dispatch.Mailbox.run(Mailbox.scala:225)
    at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
    at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: org.apache.flink.connectors.hive.FlinkHiveException: java.io.InvalidClassException: org.apache.hadoop.hive.metastore.api.SerDeInfo; local class incompatible: stream classdesc serialVersionUID = -5715937580612311959, local class serialVersionUID = -2087389775906944850
    at org.apache.flink.connectors.hive.write.HiveWriterFactory.createRecordWriter(HiveWriterFactory.java:165)
    at org.apache.flink.connectors.hive.write.HiveBulkWriterFactory.create(HiveBulkWriterFactory.java:47)
    at org.apache.flink.formats.hadoop.bulk.HadoopPathBasedPartFileWriter$HadoopPathBasedBucketWriter.openNewInProgressFile(HadoopPathBasedPartFileWriter.java:257)
    at org.apache.flink.formats.hadoop.bulk.HadoopPathBasedPartFileWriter$HadoopPathBasedBucketWriter.openNewInProgressFile(HadoopPathBasedPartFileWriter.java:230)
    at org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.rollPartFile(Bucket.java:226)
    at org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.write(Bucket.java:207)
    at org.apache.flink.streaming.api.functions.sink.filesystem.Buckets.onElement(Buckets.java:290)
    at org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSinkHelper.onElement(StreamingFileSinkHelper.java:104)
    at org.apache.flink.table.filesystem.stream.AbstractStreamingWriter.processElement(AbstractStreamingWriter.java:134)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:71)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:46)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:26)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30)
    at org.apache.flink.streaming.api.operators.StreamFilter.processElement(StreamFilter.java:40)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:71)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:46)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:26)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30)
    at StreamExecCalc$23.processElement(Unknown Source)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:71)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:46)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:26)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30)
    at org.apache.flink.table.runtime.operators.wmassigners.WatermarkAssignerOperator.processElement(WatermarkAssignerOperator.java:123)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:71)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:46)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:26)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30)
    at StreamExecCalc$5.processElement(Unknown Source)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.pushToOperator(CopyingChainingOutput.java:71)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:46)
    at org.apache.flink.streaming.runtime.tasks.CopyingChainingOutput.collect(CopyingChainingOutput.java:26)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52)
    at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30)
    at org.apache.flink.streaming.api.operators.StreamSourceContexts$ManualWatermarkContext.processAndCollect(StreamSourceContexts.java:305)
    at org.apache.flink.streaming.api.operators.StreamSourceContexts$WatermarkContext.collect(StreamSourceContexts.java:394)
    at org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSource.run(DataGeneratorSource.java:114)
    at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:100)
    at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:63)
    at org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:215)
Caused by: java.io.InvalidClassException: org.apache.hadoop.hive.metastore.api.SerDeInfo; local class incompatible: stream classdesc serialVersionUID = -5715937580612311959, local class serialVersionUID = -2087389775906944850
    at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:616)
    at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1843)
    at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1713)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1535)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:422)
    at org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:576)
    at org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:562)
    at org.apache.flink.util.InstantiationUtil.deserializeObject(InstantiationUtil.java:550)
    at org.apache.flink.util.SerializedValue.deserializeValue(SerializedValue.java:58)
    at org.apache.flink.connectors.hive.CachedSerializedValue.deserializeValue(CachedSerializedValue.java:41)
    at org.apache.flink.connectors.hive.CachedSerializedValue.deserializeValue(CachedSerializedValue.java:47)
    at org.apache.flink.connectors.hive.write.HiveWriterFactory.checkInitialize(HiveWriterFactory.java:179)
    at org.apache.flink.connectors.hive.write.HiveWriterFactory.createRecordWriter(HiveWriterFactory.java:138)
    ... 43 more
解決方案:
這個錯誤是因為 flink-sql-connector-hive 內嵌的 Hive 依賴版本,與叢集實際安裝的 $HIVE 版本不一致所導致的:兩邊的 org.apache.hadoop.hive.metastore.api.SerDeInfo 類別 serialVersionUID 不同(stream 端為 -5715937580612311959,本地端為 -2087389775906944850),Java 反序列化時因 class 不相容而拋出 InvalidClassException。
解決方法:改用與叢集 Hive 版本相符的 flink-sql-connector-hive jar(例如 flink-sql-connector-hive-<Hive版本>_<Scala版本>),並嚴格檢查 Flink connector 與 Hive 兩端的版本必須一致。
總結
以上是生活随笔為你收集整理的org.apache.hadoop.hive.metastore.api.SerDeInfo; local class incompatible的全部內容,希望文章能夠幫你解決所遇到的問題。
- 上一篇: 中班教案《捡豆豆》反思
- 下一篇: barrier相關知識點整理(还没搞完)