No response when querying data through a newly created Hive catalog

Queries against Hive hang at the query prompt and never return. fe.warn.log reports the following error:

2024-10-30 19:34:33,798 WARN (thrift-server-pool-0|173) [FrontendServiceImpl.loadTxn2PC():1418] failed to commit txn 6776666: errCode = 2, detailMessage = transaction [6776666] not found
2024-10-30 19:34:37,825 WARN (mysql-nio-pool-0|257) [HiveScanNode.getSplits():213] get file split failed for table: ec_settlementtransactions
java.util.concurrent.CompletionException: java.util.concurrent.ExecutionException: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='hdfs://nameservice1/warehouse/tablespace/external/hive/ec_ads_bi.db/ec_settlementtransactions/date_of_file=2024-10-28', inputFormat='org.apache.hadoop.mapred.TextInputFormat'} in catalog hive
        at com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newBulkMappingFunction$3(LocalLoadingCache.java:174) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
        at com.github.benmanes.caffeine.cache.LocalManualCache.bulkLoad(LocalManualCache.java:102) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
        at com.github.benmanes.caffeine.cache.LocalManualCache.getAll(LocalManualCache.java:89) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
        at com.github.benmanes.caffeine.cache.LocalLoadingCache.getAll(LocalLoadingCache.java:64) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
        at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFilesByPartitions(HiveMetaStoreCache.java:493) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFilesByPartitionsWithCache(HiveMetaStoreCache.java:471) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.source.HiveScanNode.getFileSplitByPartitions(HiveScanNode.java:226) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.source.HiveScanNode.getSplits(HiveScanNode.java:205) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.FileQueryScanNode.createScanRangeLocations(FileQueryScanNode.java:271) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.FileQueryScanNode.doFinalize(FileQueryScanNode.java:226) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.FileQueryScanNode.finalizeForNereids(FileQueryScanNode.java:218) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.util.Utils.execWithUncheckedException(Utils.java:71) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalFileScan(PhysicalPlanTranslator.java:592) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalFileScan(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.trees.plans.physical.PhysicalFileScan.accept(PhysicalFileScan.java:109) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:1747) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.trees.plans.physical.PhysicalLimit.accept(PhysicalLimit.java:159) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalDistribute(PhysicalPlanTranslator.java:282) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalDistribute(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.trees.plans.physical.PhysicalDistribute.accept(PhysicalDistribute.java:87) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:1747) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalLimit(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.trees.plans.physical.PhysicalLimit.accept(PhysicalLimit.java:159) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalResultSink(PhysicalPlanTranslator.java:379) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.visitPhysicalResultSink(PhysicalPlanTranslator.java:227) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.trees.plans.physical.PhysicalResultSink.accept(PhysicalResultSink.java:70) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator.translatePlan(PhysicalPlanTranslator.java:253) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.NereidsPlanner.translate(NereidsPlanner.java:330) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.nereids.NereidsPlanner.plan(NereidsPlanner.java:138) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.StmtExecutor.executeByNereids(StmtExecutor.java:722) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.StmtExecutor.execute(StmtExecutor.java:533) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.StmtExecutor.execute(StmtExecutor.java:512) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.ConnectProcessor.executeQuery(ConnectProcessor.java:307) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:203) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.MysqlConnectProcessor.handleQuery(MysqlConnectProcessor.java:177) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.MysqlConnectProcessor.dispatch(MysqlConnectProcessor.java:205) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.qe.MysqlConnectProcessor.processOnce(MysqlConnectProcessor.java:258) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.mysql.ReadListener.lambda$handleEvent$0(ReadListener.java:52) ~[doris-fe.jar:1.2-SNAPSHOT]
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[?:1.8.0_352-352]
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[?:1.8.0_352-352]
        at java.lang.Thread.run(Thread.java:750) ~[?:1.8.0_352-352]
Caused by: java.util.concurrent.ExecutionException: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='hdfs://nameservice1/warehouse/tablespace/external/hive/ec_ads_bi.db/ec_settlementtransactions/date_of_file=2024-10-28', inputFormat='org.apache.hadoop.mapred.TextInputFormat'} in catalog hive
        at java.util.concurrent.FutureTask.report(FutureTask.java:122) ~[?:1.8.0_352-352]
        at java.util.concurrent.FutureTask.get(FutureTask.java:192) ~[?:1.8.0_352-352]
        at org.apache.doris.common.util.CacheBulkLoader.loadAll(CacheBulkLoader.java:47) ~[doris-fe.jar:1.2-SNAPSHOT]
        at com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newBulkMappingFunction$3(LocalLoadingCache.java:166) ~[hive-catalog-shade-2.0.1.jar:2.0.1]
        ... 41 more
Caused by: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='hdfs://nameservice1/warehouse/tablespace/external/hive/ec_ads_bi.db/ec_settlementtransactions/date_of_file=2024-10-28', inputFormat='org.apache.hadoop.mapred.TextInputFormat'} in catalog hive
        at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:429) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$300(HiveMetaStoreCache.java:102) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.HiveMetaStoreCache$2.load(HiveMetaStoreCache.java:196) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.HiveMetaStoreCache$2.load(HiveMetaStoreCache.java:188) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT]
        at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_352-352]
        ... 3 more
Caused by: java.lang.RuntimeException: Call From w1plui-nifi01.aexpec.com/10.201.20.143 to w1plui-bdp03.aexpec.com.com:8020 failed on socket timeout exception: org.apache.hadoop.net.ConnectTimeoutException: 20000 millis timeout while waiting for channel to be ready for connect. ch : java.nio.channels.SocketChannel[connection-pending remote=w1plui-bdp03.aexpec.com.com/172.67.72.218:8020]; For more details see:  http://wiki.apache.org/hadoop/SocketTimeout
        at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFileCache(HiveMetaStoreCache.java:384) ~[doris-fe.jar:1.2-SNAPSHOT]
        at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:415) ~[doris-fe.jar:1.2-SNAPSHOT]
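The final Caused by shows the FE timing out (20 seconds) while opening a connection to the NameNode RPC port: the target hostname in the log, w1plui-bdp03.aexpec.com.com, carries a doubled .com suffix, so the connection attempt never reaches the real NameNode. For reference, here is a minimal sketch of how a Hive catalog over an HA HDFS cluster (nameservice1) is typically declared in Doris; the metastore URI and NameNode hostnames below are illustrative placeholders, not the actual cluster addresses:

CREATE CATALOG hive PROPERTIES (
    'type' = 'hms',
    -- assumed metastore endpoint; replace with the real HMS host
    'hive.metastore.uris' = 'thrift://hms-host.aexpec.com:9083',
    'dfs.nameservices' = 'nameservice1',
    'dfs.ha.namenodes.nameservice1' = 'nn1,nn2',
    -- NameNode RPC addresses must resolve correctly from every FE and BE host
    'dfs.namenode.rpc-address.nameservice1.nn1' = 'w1plui-bdp02.aexpec.com:8020',
    'dfs.namenode.rpc-address.nameservice1.nn2' = 'w1plui-bdp03.aexpec.com:8020',
    'dfs.client.failover.proxy.provider.nameservice1' = 'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider'
);

A misspelled rpc-address here (or a stale /etc/hosts entry on the FE host) produces exactly this ConnectTimeoutException, and each connection attempt blocks the planner thread, which the client sees as a hang.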
1 Answer

After enabling DEBUG logging (see the note after the log below for how this is typically turned on), we found that execution stops after "End optimize plan" with no further progress:

2024-10-31 13:05:47,379 DEBUG (mysql-nio-pool-0|343) [MysqlConnectProcessor.dispatch():285] handle command Query
2024-10-31 13:05:47,397 DEBUG (mysql-nio-pool-0|343) [StmtExecutor.executeByNereids():646] Nereids start to execute query:
 select count(*) from hive.ec_dim.dim_bin
2024-10-31 13:05:47,399 DEBUG (mysql-nio-pool-0|343) [NereidsPlanner.analyze():291] Start analyze plan
2024-10-31 13:05:47,455 DEBUG (mysql-nio-pool-0|343) [HMSExternalTable.supportedHiveTable():264] hms table dim_bin is EXTERNAL_TABLE with file format: org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat
2024-10-31 13:05:47,534 DEBUG (mysql-nio-pool-0|343) [HMSExternalTable.initPartitionColumns():541] get 0 partition columns for table: dim_bin
2024-10-31 13:05:47,535 DEBUG (mysql-nio-pool-0|343) [ExternalSchemaCache.loadSchema():79] load schema for SchemaCacheKey{dbName='ec_dim', tblName='dim_bin'} in catalog hive
2024-10-31 13:05:47,564 DEBUG (mysql-nio-pool-0|343) [InitMaterializationContextHook.createAsyncMaterializationContext():110] Enable materialized view rewrite but availableMTMVs is empty, current queryId is stmt[2, 737b8d8703ee4b18-b6548ee69ef7f99c]
2024-10-31 13:05:47,564 DEBUG (mysql-nio-pool-0|343) [NereidsPlanner.analyze():297] End analyze plan
2024-10-31 13:05:47,564 DEBUG (mysql-nio-pool-0|343) [NereidsPlanner.rewrite():306] Start rewrite plan
2024-10-31 13:05:47,567 DEBUG (mysql-nio-pool-0|343) [Role.checkTblPriv():376] failed to get wanted privs: priv predicate: OR, Admin_priv,Select_priv, granted:
2024-10-31 13:05:47,575 DEBUG (mysql-nio-pool-0|343) [NereidsPlanner.rewrite():311] End rewrite plan
2024-10-31 13:05:47,575 DEBUG (mysql-nio-pool-0|343) [NereidsPlanner.optimize():318] Start optimize plan
2024-10-31 13:05:47,584 DEBUG (RowCountRefreshExecutor-0|369) [BasicAsyncCacheLoader.lambda$asyncLoad$0():43] Load async cache [org.apache.doris.datasource.ExternalRowCountCache$RowCountKey@5b4234a6] cost time ms:2
2024-10-31 13:05:47,621 DEBUG (mysql-nio-pool-0|343) [NereidsPlanner.optimize():323] End optimize plan
2024-10-31 13:05:47,631 DEBUG (STATS_FETCH-0|198) [StmtExecutor.executeInternalQuery():3105] INTERNAL QUERY: OriginStatement{originStmt='SELECT * FROM `__internal_schema`.`column_statistics` WHERE `id` = '102213123594925222--1-file_subsrc' AND `catalog_id` = '10138' AND `db_id` = '8890022221661201397'', idx=0}
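
For reference, FE DEBUG output like the above is typically enabled through sys_log_verbose_modules in fe.conf (a minimal sketch; the module list can be narrowed to specific packages):

# fe.conf: emit DEBUG-level logs for all Doris FE modules, then restart the FE
sys_log_verbose_modules = org.apache.doris

The silence after "End optimize plan" is consistent with the stack trace in the question: during plan translation the FE calls HiveScanNode.getSplits() to list the table's files on HDFS, and each connection attempt to the unreachable NameNode blocks for the 20-second connect timeout (plus any client retries) before failing, so the session appears frozen. Fixing the NameNode address resolution (the doubled .com.com hostname) should unblock the query.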