使用 shell 脚本来实现，脚本如下：
#!/bin/bash
# Export yesterday's Hive data, convert it into redis-cli commands, and bulk
# load it into Redis on a remote host. Every step is appended to $log_file.

# --- define variables ---
# NOTE(review): 'pwd' is a confusing variable name (collides mentally with the
# pwd command); kept as-is because the functions below reference $pwd.
pwd=/root
day=$(date +%Y%m%d)                      # today, e.g. 20240101
before_day=$(date -d '-1 day' +%Y%m%d)   # yesterday (GNU date -d)
log_file=$pwd/load_redis_$day.log

# --- create the per-day working directory and start the log ---
mkdir -p "$pwd/$day" && echo "$(date '+%Y-%m-%d %H:%M:%S')----$pwd/$day 目录已创建" > "$log_file"
# --- export data from Hive ---
# Writes the export result to $pwd/$day/hive_$before_day.txt.
# Exits the script (status 1) if the export or the rename fails.
function export_hive(){
    # Log the exact statement that will be executed. (Bug fix: the original
    # logged day_id=$day but actually executed day_id=$before_day.)
    echo "导出数据为 insert overwrite local directory '$pwd/$day/' row format delimited fields terminated by '|' select a,b,c,d from dw_wangxinqing_yyyymmdd1 where day_id=$before_day;" >> "$log_file"
    echo "$(date '+%Y-%m-%d %H:%M:%S') -------开始从hive导出数据--------" >> "$log_file"
    # Run the export, then rename Hive's 000000_0-style output file; the '&&'
    # makes the $? below reflect the whole chain.
    hive -e "insert overwrite local directory '$pwd/$day/' row format delimited fields terminated by '|' select a,b,c,d from dw_wangxinqing_yyyymmdd1 where day_id=$before_day;" \
        && mv "$pwd/$day"/000000* "$pwd/$day/hive_$before_day.txt"
    if [ $? -ne 0 ]; then
        echo "$(date '+%Y-%m-%d %H:%M:%S')-----导出失败-----" >> "$log_file"
        exit 1
    else
        echo "$(date '+%Y-%m-%d %H:%M:%S')-----导出成功-----" >> "$log_file"
    fi
}

# --- transform the Hive export into redis-cli commands ---
# Convert the pipe-delimited Hive export into HMSET/EXPIRE commands, convert
# line endings to CRLF (redis-cli --pipe expects \r\n-terminated commands),
# and copy the command file to the target server. Exits on transfer failure.
function Clear_hive(){
    # Remove any stale command file from a previous run.
    rm -f "$pwd/$day/redis_$before_day.txt"
    # Each input line "tel|region|crowd|activeType" becomes one HMSET plus a
    # 300-second EXPIRE keyed on asiainfo:<telnum>.
    awk -F "|" '{print "HMSET asiainfo:"$1" telnum "$1" region "$2" crowd "$3" activeType "$4"\nEXPIRE asiainfo:"$1" 300"}' "$pwd/$day/hive_$before_day.txt" > "$pwd/$day/redis_$before_day.txt"
    # Bug fix: convert the generated redis command file, not the Hive source
    # file — the command file is what redis-cli --pipe consumes.
    unix2dos "$pwd/$day/redis_$before_day.txt"
    echo "$(date '+%Y-%m-%d %H:%M:%S')-----格式转换成功-----" >> "$log_file"
    scp "$pwd/$day/redis_$before_day.txt" hadoop1:"$pwd/"
    if [ $? -ne 0 ]; then
        echo "$(date '+%Y-%m-%d %H:%M:%S')-----传输到目标服务器失败-----" >> "$log_file"
        exit 1
    else
        echo "$(date '+%Y-%m-%d %H:%M:%S')-----传输到目标服务器成功-----" >> "$log_file"
    fi
}

# --- load the command file into Redis ---
# Pipe the generated command file into Redis on hadoop1 via redis-cli --pipe.
# Exits with status 1 if the remote load fails (consistent with the other steps).
function Load_redis(){
    echo "-------------可以load到redis------------" >> "$log_file"
    # NOTE(review): passing the password with -a exposes it in `ps` output and
    # shell history on the remote host — prefer the REDISCLI_AUTH environment
    # variable or a protected config file. Confirm before changing.
    ssh hadoop1 "time cat $pwd/redis_$before_day.txt | /usr/bin/redis-cli --pipe -a 'password'" >> "$log_file"
    if [ $? -ne 0 ]; then
        echo "$(date '+%Y-%m-%d %H:%M:%S')-----load到redis失败-----" >> "$log_file"
        exit 1
    fi
}

# --- entry point ---
# Run the pipeline stages in order. Each stage exits the script itself on
# failure, so reaching the next call implies the previous stage succeeded.
function start(){
    export_hive
    Clear_hive
    Load_redis
}
start
在原文基础上做了优化，怕忘了，所以写篇文章记录一下。原文链接如下：
两种方式用Shell实现把Hive表数据导入到redis_shell脚本 hive加载到doris_刘先生我在呀的博客-CSDN博客