Doris compute-storage decoupled cluster: INSERT operation fails with an error


After deploying a compute-storage decoupled cluster according to the documentation, INSERT statements fail with an error. Detailed logs can be downloaded from the links below:
FE log: http://archive.uazw.com/tmp/fe.log
BE log: http://archive.uazw.com/tmp/be.INFO

Steps to reproduce

  1. Create the remote shared storage vault (no errors; an optional verification sketch follows the statement)
CREATE STORAGE VAULT IF NOT EXISTS hdfs_vault PROPERTIES (
    "type"="hdfs",                                     
    "fs.defaultFS"="hdfs://xxxxprdhadoop",             
    "path_prefix"="doris/cluster/01",                    
    "hadoop.security.authentication"="kerberos",        
    "hadoop.kerberos.principal"="xxx@XXX.COM",       
    "hadoop.kerberos.keytab"="/xxx/security/keytabs/hdp.keytab"         
);
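An optional sanity check that the vault was actually registered before creating tables (a minimal sketch, assuming the SHOW STORAGE VAULTS and SET ... AS DEFAULT STORAGE VAULT statements available in 3.x decoupled mode):

-- hdfs_vault should be listed with the HDFS properties configured above.
SHOW STORAGE VAULTS;
-- Optional: make it the default vault so tables created without "storage_vault_name" also use it.
SET hdfs_vault AS DEFAULT STORAGE VAULT;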
  2. Create the database and table (no errors)
create database demo;
CREATE TABLE demo.unique_tbl
( 
`period_name` char(7) NOT NULL COMMENT 'period', 
`company` varchar(20) NOT NULL COMMENT 'company', 
`document` varchar(20) NOT NULL COMMENT 'document', 
`balance` varchar(20) NOT NULL COMMENT 'write-off status' 
) ENGINE=OLAP 
UNIQUE KEY(`period_name`, `company`, `document`) 
PARTITION BY LIST(`period_name`) 
(
  PARTITION P_2023_01 VALUES IN ("2023-01"), 
  PARTITION P_2023_02 VALUES IN ("2023-02"), 
  PARTITION P_2023_03 VALUES IN ("2023-03"), 
  PARTITION P_2023_04 VALUES IN ("2023-04"), 
  PARTITION P_2023_05 VALUES IN ("2023-05")
) 
DISTRIBUTED BY HASH(`period_name`) BUCKETS AUTO 
PROPERTIES ( 
    "replication_num" = "3",
    "storage_vault_name" = "hdfs_vault",
    "enable_unique_key_merge_on_write" = "true"
);
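To double-check that the table really got bound to hdfs_vault, the DDL stored by the FE can be inspected (just a sanity-check sketch using the standard SHOW CREATE TABLE statement):

-- The reported properties should still reference "storage_vault_name" = "hdfs_vault".
SHOW CREATE TABLE demo.unique_tbl;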
  3. Insert data (fails with an error; a diagnostic sketch follows the statement)
insert into demo.unique_tbl 
(period_name, company, document, balance)
values 
('2023-01', '3021', 'ebs_3021000001', '未勾销'),
('2023-01', '3022', 'ebs_3022000001', '未勾销');
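Two ordinary statements can help narrow down whether the failure happens during FE-side partition routing or while the BE writes to the HDFS vault (a suggested diagnostic sketch, not taken from the logs):

-- All compute nodes should report Alive = true before retrying the INSERT.
SHOW BACKENDS;
-- '2023-01' must map to an existing LIST partition (P_2023_01) for the rows to be routed.
SHOW PARTITIONS FROM demo.unique_tbl;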

Version: 3.0.2, compute-storage decoupled mode
Vault type: HDFS 3.3.4

Configuration

  • fe
CUR_DATE=`date +%Y%m%d-%H%M%S`
LOG_DIR = /xx/doris/logs/fe/
JAVA_OPTS="-Dfile.encoding=UTF-8 -Djavax.security.auth.useSubjectCredsOnly=false -Xss4m -Xmx16384m -XX:+UnlockExperimentalVMOptions -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -XX:+PrintGCDateStamps -XX:+PrintGCDetails -Xloggc:$LOG_DIR/log/fe.gc.log.$CUR_DATE -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=50M -Dlog4j2.formatMsgNoLookups=true"
JAVA_OPTS_FOR_JDK_17="-Dfile.encoding=UTF-8 -Djavax.security.auth.useSubjectCredsOnly=false -Xmx16384m -Xms16384m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=$LOG_DIR -Xlog:gc*:$LOG_DIR/fe.gc.log.$CUR_DATE:time,uptime:filecount=10,filesize=50M --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens java.base/jdk.internal.ref=ALL-UNNAMED"
http_port = 8030
rpc_port = 9020
query_port = 9030
edit_log_port = 9010
arrow_flight_sql_port = -1
sys_log_level = INFO
sys_log_mode = ASYNC

deploy_mode = cloud
cluster_id = 1
meta_service_endpoint = 10.99.178.xx1:5000,10.99.178.xx2:5000,10.99.178.xx3:5000
JAVA_HOME = /opt/zulu17.54.21-ca-jdk17.0.13-linux_x64
lower_case_table_names = 1

meta_dir = /cmccssd1/doris/meta-data
priority_networks = 10.99.178.0/24

enable_single_replica_load = true
experimental_enable_workload_group = true
experimental_enable_cpu_hard_limit=true
enable_feature_binlog = true

backend_rpc_timeout_ms = 600000
remote_fragment_exec_timeout_ms = 30000
stream_load_default_precommit_timeout_second=3600
stream_load_default_timeout_second = 259200
max_stream_load_record_size=500000
max_bytes_per_broker_scanner = 1099511627776
max_broker_concurrency = 30
fe_thrift_max_pkg_bytes = -1
  • be
CUR_DATE=`date +%Y%m%d-%H%M%S`
LOG_DIR = /xxxssd2/doris/logs/be/
JAVA_OPTS="-Dfile.encoding=UTF-8 -Xmx4096m -DlogPath=$LOG_DIR/jni.log -Xloggc:$LOG_DIR/be.gc.log.$CUR_DATE -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=50M -Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true -Dsun.java.command=DorisBE -XX:-CriticalJNINatives"
JAVA_OPTS_FOR_JDK_17="-Dfile.encoding=UTF-8 -Xmx4096m -DlogPath=$LOG_DIR/jni.log -Xlog:gc*:$LOG_DIR/be.gc.log.$CUR_DATE:time,uptime:filecount=10,filesize=50M -Djavax.security.auth.useSubjectCredsOnly=false -Dsun.security.krb5.debug=true -Dsun.java.command=DorisBE -XX:-CriticalJNINatives -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED --add-opens=java.management/sun.management=ALL-UNNAMED"
JEMALLOC_CONF="percpu_arena:percpu,background_thread:true,metadata_thp:auto,muzzy_decay_ms:15000,dirty_decay_ms:15000,oversize_threshold:0,prof:false,lg_prof_interval:32,lg_prof_sample:19,prof_gdump:false,prof_accum:false,prof_leak:false,prof_final:false"
JEMALLOC_PROF_PRFIX=""
be_port = 9060
webserver_port = 8040
heartbeat_service_port = 9050
brpc_port = 8060
arrow_flight_sql_port = -1
enable_https = false
ssl_certificate_path = "$DORIS_HOME/conf/cert.pem"
ssl_private_key_path = "$DORIS_HOME/conf/key.pem"
sys_log_level = INFO
aws_log_level=0
AWS_EC2_METADATA_DISABLED=true

deploy_mode = cloud
file_cache_path = [{"path":"/xxxssd2/doris/cache","total_size":1073741824},{"path":"/xxxssd3/doris/cache","total_size":1073741824},{"path":"/xxxssd4/doris/cache","total_size":1649267441664},{"path":"/xxxssd5/doris/cache","total_size":1649267441664},{"path":"/xxxssd6/doris/cache","total_size":1649267441664},{"path":"/xxxssd7/doris/cache","total_size":1649267441664},{"path":"/xxxssd8/doris/cache","total_size":1649267441664},{"path":"/xxxssd9/doris/cache","total_size":1649267441664},{"path":"/xxxssd10/doris/cache","total_size":1649267441664},{"path":"/xxxssd11/doris/cache","total_size":1649267441664},{"path":"/xxxssd12/doris/cache","total_size":1649267441664},{"path":"/xxxssd13/doris/cache","total_size":1649267441664},{"path":"/xxxssd14/doris/cache","total_size":1649267441664}]
JAVA_HOME = /opt/zulu17.54.21-ca-jdk17.0.13-linux_x64
lower_case_table_names = 1
priority_networks = 10.99.178.0/24
storage_root_path = /xxxssd4/doris/data,medium:HDD;/xxxssd5/doris/data,medium:HDD;/xxxssd6/doris/data,medium:HDD;/xxxssd7/doris/data,medium:HDD;/xxxssd8/doris/data,medium:HDD;/xxxssd9/doris/data,medium:HDD;/xxxssd10/doris/data,medium:HDD;/xxxssd11/doris/data,medium:HDD;/xxxssd12/doris/data,medium:HDD;/xxxssd13/doris/data,medium:HDD;/xxxssd14/doris/data,medium:HDD;
mem_limit = 80%

fragment_pool_thread_num_max = 2048
fragment_pool_queue_size = 4096
brpc_num_threads = 256
enable_stream_load_record = true

string_type_length_soft_limit_bytes = 104857600
streaming_load_json_max_mb = 102400
streaming_load_max_mb = 1024000
streaming_load_rpc_max_alive_time_sec = 1200
string_type_length_soft_limit_bytes = 10485760

enable_single_replica_load = true
enable_feature_binlog = true
enable_query_memory_overcommit = false
  • ms
brpc_listen_port = 5000
brpc_num_threads = -1
brpc_idle_timeout_sec = 30
fdb_cluster = fdbcluster:xxxfdb@10.99.178.xx:4500,10.99.178.xx:4500,10.99.178.xx:4500
fdb_cluster_file_path = ./conf/fdb.cluster
http_token = greedisgood9999
label_keep_max_second = 259200
expired_txn_scan_key_nums = 1000
log_dir = /xxxssd2/doris/logs/ms/
log_level = info
log_size_mb = 1024
log_filenum_quota = 10
log_immediate_flush = false
recycle_interval_seconds = 3600
retention_seconds = 259200
recycle_concurrency = 16
max_num_stages = 40
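For reference, the fdb_cluster value above is expected to match the single connection string stored in the file referenced by fdb_cluster_file_path (FoundationDB's description:ID@host:port,... format). A hypothetical ./conf/fdb.cluster, masked the same way as the rest of this post, would contain:

fdbcluster:xxxfdb@10.99.178.xx:4500,10.99.178.xx:4500,10.99.178.xx:4500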
1 Answer

Is this intermittent, or does it happen every time? I tested on my side and did not hit any problem.