1,背景:carbondata的入库segments对应的状态都是success,但是查询的时候报错,
2,排查内容
1,segments的状态 success
2,任务执行记录日志 正常
3,找到对应查询的天,指定对应的segments,进行查询,有能查到数据的
SET carbon.input.segments.default.table_name = segments_id;
select * from table_name limit 1;
有结果
3,排查原因
1,表结构没问题,之前查询都是正常的
4,可能原因
1,数据入库时候数据异常的问题
5,验证4的想法
用脚本实现,逐个排查有问题的那一天的segments
发现有些segments没有,最后排查,有些segment是有问题的,虽然状态是success,删掉对应的segments,查询正常
6,根据处理步骤,整理出来自动删除脚本如下
############
# One-time setup: working directories for logs and per-segment probe results.
mkdir -pv logs result
##################第一个脚本,获取segmentid以及比对异常segmentid
get_segment_id.sh
#!/bin/bash
# Purpose: find yesterday's segment id range for default.table_name, then
# probe every segment id in that range with a one-row query. Segments that
# return data leave a file in ./result; ids with no result file are flagged
# later by dele_bad_segment.sh. All probed ids are appended to auto_id.txt.
source /etc/profile

readonly JDBC_CMD=/home/eversec/jdbc/bin/everdata-jdbc.sh
readonly JDBC_URL="jdbc:hive2://10.192.21.1:10000"

date_s=$(date -d "1 day ago" +"%Y-%m-%d 00:00:01")
date_e=$(date -d "1 day ago" +"%Y-%m-%d 23:59:59")

# Start from a clean slate: previous probe results and id lists.
rm -f ./result/*
rm -f ./*.txt

# First/last segment loaded yesterday -> lowest/highest segment id.
"$JDBC_CMD" -i "$JDBC_URL" -q "SHOW SEGMENTS ON default.table_name as select * from table_name_segments where loadstarttime>='$date_s' and loadstarttime<='$date_e' order by loadStartTime asc limit 1;" -o start_id.txt
"$JDBC_CMD" -i "$JDBC_URL" -q "SHOW SEGMENTS ON default.table_name as select * from table_name_segments where loadstarttime>='$date_s' and loadstarttime<='$date_e' order by loadStartTime desc limit 1;" -o end_id.txt

if [ -f start_id.txt ] && [ -f end_id.txt ]; then
  # Segment id is the first comma-separated field of the SHOW SEGMENTS row.
  sid=$(awk -F',' '{print $1}' start_id.txt)
  eid=$(awk -F',' '{print $1}' end_id.txt)
  for ((i = sid; i <= eid; i++)); do
    # BUG FIX: original query was "select * from table_name where order by hour"
    # -- a bare WHERE with no predicate is a SQL syntax error, so every probe
    # failed regardless of segment health. The stray WHERE is removed.
    "$JDBC_CMD" -i "$JDBC_URL" -q "SET carbon.input.segments.default.table_name = $i; select * from table_name order by hour limit 1;" -o "./result/$i"
    echo "$i" >> auto_id.txt
  done
fi
##################第二个脚本,删除使用
cat dele_bad_segment.sh
#!/bin/bash
# Purpose: compare the probed segment ids (auto_id.txt, written by
# get_segment_id.sh) against the ids that actually produced a result file in
# ./result. Any id with no result file is a bad segment; all bad ids are
# deleted from the CarbonData table in a single DELETE statement.

readonly JDBC_CMD=/home/eversec/jdbc/bin/everdata-jdbc.sh
readonly JDBC_URL="jdbc:hive2://10.192.21.1:10000"

# Clean up outputs from a previous run so stale ids are never re-deleted.
rm -f ./segment_id.txt ./bad_segmentid.txt

# Ids that returned data = basenames of the files in ./result.
# (Glob loop instead of parsing `ls` output.)
for f in ./result/*; do
  [ -e "$f" ] || continue
  basename -- "$f"
done > ./segment_id.txt

if [ -s auto_id.txt ]; then
  while read -r line; do
    # BUG FIX: the original used `grep "$line"` (substring match), so id 1
    # matched 10, 21, ... and a genuinely bad segment could be skipped.
    # -x forces an exact whole-line match; -q avoids a useless `wc -l`.
    if ! grep -qx -- "$line" segment_id.txt; then
      echo "$line" >> bad_segmentid.txt
    fi
  done < auto_id.txt
fi

if [ -s bad_segmentid.txt ]; then
  # Join the bad ids into a comma-separated list, e.g. "3,7,12".
  # BUG FIX: the original hand-rolled loop reset the list whenever a line
  # equalled the first id, silently dropping earlier entries on duplicates.
  dele_id=$(paste -sd, bad_segmentid.txt)
  echo "$dele_id"
  "$JDBC_CMD" -i "$JDBC_URL" -q "DELETE FROM TABLE table_name WHERE SEGMENT.ID IN ($dele_id);"
fi