An automated script for scheduled daily imports into the Hive data warehouse


[Author]: kwu 



Create a shell script that builds a temporary staging table, loads the raw log files into it, and then moves the data into the permanent partitioned table:

#!/bin/sh
# import yesterday's tracking logs into the Hive warehouse (stage.tracklog)

# yesterday's date in YYYYMMDD format, matching the log directory and file names
yesterday=`date --date='1 days ago' +%Y%m%d`

hive -e "
use stage;
create table tracklog_tmp (
dateday string,
datetime string,
ip string ,
cookieid string,
userid string,
logserverip string,
referer string,
requesturl string,
remark1 string,
remark2 string,
alexaflag string,
ua string,
wirelessflag string
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ';"
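
The permanent table tracklog is not shown in the original post; it is assumed to already exist with the same columns as the staging table and a day partition. A minimal sketch of what its DDL might look like (column types, delimiter, and storage format are assumptions):

hive -e "
use stage;
-- assumed DDL: same columns as tracklog_tmp, partitioned by day
create table if not exists tracklog (
dateday string,
datetime string,
ip string,
cookieid string,
userid string,
logserverip string,
referer string,
requesturl string,
remark1 string,
remark2 string,
alexaflag string,
ua string,
wirelessflag string
)
PARTITIONED BY (day string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ';"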


hive -e "
use stage;
set hive.enforce.bucketing=true;
set hive.exec.compress.output=true;
set mapred.output.compress=true;
set mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec;
set io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec;
load data local inpath '/diskg/logs/tracklog_192.168.1.1/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}')  select  *  from tracklog_tmp;
load data local inpath '/diskg/logs/tracklog_192.168.1.2/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}')  select  *  from tracklog_tmp;
load data local inpath '/diskg/logs/tracklog_192.168.1.3/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}')  select  *  from tracklog_tmp;
load data local inpath '/diskg/logs/trackloguc_192.168.1.1/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}')  select  *  from tracklog_tmp;
load data local inpath '/diskg/logs/trackloguc_192.168.1.2/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}')  select  *  from tracklog_tmp;
load data local inpath '/diskg/logs/trackloguc_192.168.1.3/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}')  select  *  from tracklog_tmp;
"

hive -e "
use stage;
drop table tracklog_tmp ;"
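
To confirm that the day's partition was created and populated, a quick check can be appended to the script before the temp table is dropped (the count query is just an example):

hive -e "
use stage;
show partitions tracklog;
select count(*) from tracklog where day='${yesterday}';"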


Add the scheduled task to crontab:

crontab -e 

and add the following entry:

#import tracklog
25  07 * * * /opt/bin/hive_opt/import_tracklog.sh
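
To make failures easier to diagnose, the cron entry can also redirect the script's output to a log file (the log path below is an assumption):

#import tracklog
25  07 * * * /opt/bin/hive_opt/import_tracklog.sh >> /var/log/import_tracklog.log 2>&1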



