查询 Hive 外表时,查询界面卡死(长时间无响应),fe.warn.log 日志报错如下:
2024-10-30 19:34:33,798 WARN (thrift-server-pool-0|173) [FrontendServiceImpl.loadTxn2PC():1418] failed to commit txn 6776666: errCode = 2, detailMessage = transaction [6776666] not found
2024-10-30 19:34:37,825 WARN (mysql-nio-pool-0|257) [HiveScanNode.getSplits():213] get file split failed for table: ec_settlementtransactions
java.util.concurrent.CompletionException: java.util.concurrent.ExecutionException: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='hdfs://nameservice1/warehouse/tablespace/external/hive/ec_ads_bi.db/ec_settlementtransactions/date_of_file=2024-10-28', inputFormat='org.apache.hadoop.mapred.TextInputFormat'} in catalog hive
at com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newBulkMappingFunction$3(LocalLoadingCache.java:174) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
at com.github.benmanes.caffeine.cache.LocalManualCache.bulkLoad(LocalManualCache.java:102) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
at com.github.benmanes.caffeine.cache.LocalManualCache.getAll(LocalManualCache.java:89) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
at com.github.benmanes.caffeine.cache.LocalLoadingCache.getAll(LocalLoadingCache.java:64) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFilesByPartitions(HiveMetaStoreCache.java:493) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFilesByPartitionsWithCache(HiveMetaStoreCache.java:471) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.source.HiveScanNode.getFileSplitByPartitions(HiveScanNode.java:226) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.source.HiveScanNode.getSplits(HiveScanNode.java:205) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.FileQueryScanNode.createScanRangeLocations(FileQueryScanNode.java:271) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.FileQueryScanNode.doFinalize(FileQueryScanNode.java:226) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.FileQueryScanNode.finalizeForNereids(FileQueryScanNode.java:218) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.util.Utils.execWithUncheckedException(Utils.java:71) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalFileScan(PhysicalPlanTranslator.java:592) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalFileScan(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.trees.plans.physical.PhysicalFileScan.accept(PhysicalFileScan.java:109) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:1747) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.trees.plans.physical.PhysicalLimit.accept(PhysicalLimit.java:159) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalDistribute(PhysicalPlanTranslator.java:282) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalDistribute(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.trees.plans.physical.PhysicalDistribute.accept(PhysicalDistribute.java:87) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:1747) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.trees.plans.physical.PhysicalLimit.accept(PhysicalLimit.java:159) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalResultSink(PhysicalPlanTranslator.java:379) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalResultSink(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.trees.plans.physical.PhysicalResultSink.accept(PhysicalResultSink.java:70) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.translatePlan(PhysicalPlanTranslator.java:253) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.NereidsPlanner.translate(NereidsPlanner.java:330) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.nereids.NereidsPlanner.plan(NereidsPlanner.java:138) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.StmtExecutor.executeByNereids(StmtExecutor.java:722) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.StmtExecutor.execute(StmtExecutor.java:533) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.StmtExecutor.execute(StmtExecutor.java:512) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.ConnectProcessor.executeQuery(ConnectProcessor.java:307) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:203) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.MysqlConnectProcessor.handleQuery(MysqlConnectProcessor.java:177) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.MysqlConnectProcessor.dispatch(MysqlConnectProcessor.java:205) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.qe.MysqlConnectProcessor.processOnce(MysqlConnectProcessor.java:258) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.mysql.ReadListener.lambda$handleEvent$0(ReadListener.java:52) ~[doris-fe.jar:1.2-SNAPSHOT]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[?:1.8.0_352-352]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[?:1.8.0_352-352]
at java.lang.Thread.run(Thread.java:750) ~[?:1.8.0_352-352]
Caused by: java.util.concurrent.ExecutionException: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='hdfs://nameservice1/warehouse/tablespace/external/hive/ec_ads_bi.db/ec_settlementtransactions/date_of_file=2024-10-28', inputFormat='org.apache.hadoop.mapred.TextInputFormat'} in catalog hive
at java.util.concurrent.FutureTask.report(FutureTask.java:122) ~[?:1.8.0_352-352]
at java.util.concurrent.FutureTask.get(FutureTask.java:192) ~[?:1.8.0_352-352]
at org.apache.doris.common.util.CacheBulkLoader.loadAll(CacheBulkLoader.java:47) ~[doris-fe.jar:1.2-SNAPSHOT]
at com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newBulkMappingFunction$3(LocalLoadingCache.java:166) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
... 41 more
Caused by: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='hdfs://nameservice1/warehouse/tablespace/external/hive/ec_ads_bi.db/ec_settlementtransactions/date_of_file=2024-10-28', inputFormat='org.apache.hadoop.mapred.TextInputFormat'} in catalog hive
at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:429) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$300(HiveMetaStoreCache.java:102) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.HiveMetaStoreCache$2.load(HiveMetaStoreCache.java:196) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.HiveMetaStoreCache$2.load(HiveMetaStoreCache.java:188) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_352-352]
... 3 more
Caused by: java.lang.RuntimeException: Call From w1plui-nifi01.aexpec.com/10.201.20.143 to w1plui-bdp03.aexpec.com.com:8020 failed on socket timeout exception: org.apache.hadoop.net.ConnectTimeoutException: 20000 millis timeout while waiting for channel to be ready for connect. ch : java.nio.channels.SocketChannel[connection-pending remote=w1plui-bdp03.aexpec.com.com/172.67.72.218:8020]; For more details see: http://wiki.apache.org/hadoop/SocketTimeout
at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFileCache(HiveMetaStoreCache.java:384) ~[doris-fe.jar:1.2-SNAPSHOT]
at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:415) ~[doris-fe.jar:1.2-SNAPSHOT]

备注:根因异常中连接的目标主机名是 w1plui-bdp03.aexpec.com.com(注意 ".com" 重复,且解析到了公网地址 172.67.72.218),疑似 HDFS NameNode 主机名配置错误(请检查 catalog 的 HDFS 相关配置、hdfs-site.xml 的 dfs.namenode.rpc-address 以及 FE 节点的 /etc/hosts / DNS 解析),而非单纯的网络超时。