Flink 1.11.2 writing to Hive throws UnknownHostException


Flink 1.11.2 writing to Hive throws UnknownHostException

smallwong
Environment: Flink 1.11.2, CDH 5.13.3 (Hive 1.1.0)
What I have tried: setting env.yarn.conf.dir in flink-conf.yaml, and double-checking the cluster configuration; hdfs-site.xml looks fine, but neither helped.
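For reference, the flink-conf.yaml entry I set looks like the sketch below; the path is a placeholder here, not the actual directory on this cluster:

    # flink-conf.yaml (sketch; substitute the real Hadoop conf directory)
    env.yarn.conf.dir: /etc/hadoop/conf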
Exception:
Caused by: java.net.UnknownHostException: nameservice
        at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:406) ~[hadoop-common.jar:?]
        at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:310) ~[hadoop-hdfs.jar:?]
        at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:176) ~[hadoop-hdfs.jar:?]
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:735) ~[hadoop-hdfs.jar:?]
        at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:678) ~[hadoop-hdfs.jar:?]
        at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:158) ~[hadoop-hdfs.jar:?]
        at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2816) ~[hadoop-common.jar:?]
        at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:98) ~[hadoop-common.jar:?]
        at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2853) ~[hadoop-common.jar:?]
        at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2835) ~[hadoop-common.jar:?]
        at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:387) ~[hadoop-common.jar:?]
        at org.apache.flink.formats.hadoop.bulk.committer.HadoopRenameFileCommitter.generateTempFilePath(HadoopRenameFileCommitter.java:112) ~[flink-connector-hive_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.formats.hadoop.bulk.committer.HadoopRenameFileCommitter.<init>(HadoopRenameFileCommitter.java:48) ~[flink-connector-hive_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.formats.hadoop.bulk.DefaultHadoopFileCommitterFactory.create(DefaultHadoopFileCommitterFactory.java:40) ~[flink-connector-hive_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.formats.hadoop.bulk.HadoopPathBasedPartFileWriter$HadoopPathBasedBucketWriter.openNewInProgressFile(HadoopPathBasedPartFileWriter.java:254) ~[flink-connector-hive_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.formats.hadoop.bulk.HadoopPathBasedPartFileWriter$HadoopPathBasedBucketWriter.openNewInProgressFile(HadoopPathBasedPartFileWriter.java:230) ~[flink-connector-hive_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.rollPartFile(Bucket.java:209) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.write(Bucket.java:200) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.functions.sink.filesystem.Buckets.onElement(Buckets.java:282) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSinkHelper.onElement(StreamingFileSinkHelper.java:104) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.table.filesystem.stream.StreamingFileWriter.processElement(StreamingFileWriter.java:119) ~[flink-table-blink_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:717) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:692) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:672) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at SourceConversion$3.processElement(Unknown Source) ~[?:?]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:717) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:692) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:672) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.StreamMap.processElement(StreamMap.java:41) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:717) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:692) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:672) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:52) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.CountingOutput.collect(CountingOutput.java:30) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.StreamSourceContexts$NonTimestampContext.collect(StreamSourceContexts.java:104) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.StreamSourceContexts$NonTimestampContext.collectWithTimestamp(StreamSourceContexts.java:111) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher.emitRecordsWithTimestamps(AbstractFetcher.java:352) ~[ae3cd858c38340e48cede6e6d9322451.jar:?]
        at org.apache.flink.streaming.connectors.kafka.internal.KafkaFetcher.partitionConsumerRecordsHandler(KafkaFetcher.java:185) ~[ae3cd858c38340e48cede6e6d9322451.jar:?]
        at org.apache.flink.streaming.connectors.kafka.internal.KafkaFetcher.runFetchLoop(KafkaFetcher.java:141) ~[ae3cd858c38340e48cede6e6d9322451.jar:?]
        at org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase.runWithPartitionDiscovery(FlinkKafkaConsumerBase.java:765) ~[ae3cd858c38340e48cede6e6d9322451.jar:?]
        at org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase.run(FlinkKafkaConsumerBase.java:757) ~[ae3cd858c38340e48cede6e6d9322451.jar:?]
        at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:101) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:63) ~[flink-dist_2.11-1.11.2.jar:1.11.2]
        at org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:213) ~[flink-dist_2.11-1.11.2.jar:1.11.2]




Re: Flink 1.11.2 writing to Hive throws UnknownHostException

Yang Wang
In general, an UnknownHostException like this means the HDFS client did not pick up the correct hdfs-site.xml configuration, so it ends up resolving the nameservice as if it were a hostname. Check the JobManager and TaskManager logs and see whether the classpath printed there contains the Hadoop conf directory.
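A quick way to check is to search the startup banner in the logs, along these lines (the log path is only an example; the actual location depends on where YARN keeps the container logs):

    # look for the classpath line in the TaskManager startup banner
    grep -i 'classpath' taskmanager.log | head -n 1

For reference, the client-side hdfs-site.xml has to expose HA entries like the sketch below before a logical nameservice can resolve; the namenode hostnames and ports here are placeholders, not values from your cluster:

    <!-- sketch of the HA entries the client needs in hdfs-site.xml;
         hostnames and ports below are placeholders -->
    <property>
      <name>dfs.nameservices</name>
      <value>nameservice</value>
    </property>
    <property>
      <name>dfs.ha.namenodes.nameservice</name>
      <value>nn1,nn2</value>
    </property>
    <property>
      <name>dfs.namenode.rpc-address.nameservice.nn1</name>
      <value>namenode1.example.com:8020</value>
    </property>
    <property>
      <name>dfs.namenode.rpc-address.nameservice.nn2</name>
      <value>namenode2.example.com:8020</value>
    </property>
    <property>
      <name>dfs.client.failover.proxy.provider.nameservice</name>
      <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
    </property>

If the conf directory is missing from the classpath, exporting HADOOP_CONF_DIR in the environment before submitting the job is another common fix.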


Best,
Yang

smallwong <[hidden email]> wrote on Mon, Dec 7, 2020, 10:04 AM:

> Environment: Flink 1.11.2, CDH 5.13.3 (Hive 1.1.0)
> What I have tried: setting env.yarn.conf.dir in flink-conf.yaml, and double-checking the cluster configuration; hdfs-site.xml looks fine, but neither helped.
> Exception:
> Caused by: java.net.UnknownHostException: nameservice
> [stack trace snipped; identical to the original post above]