使用sqoop1將hive導入mysql

#!/bin/sh
# Export a Hive table to MySQL with Sqoop 1, in three steps:
#   1. materialize a Hive query as tab-delimited text in a temp HDFS dir,
#   2. sqoop-export that directory into the MySQL table `speed_test`,
#   3. delete the temp directory.
#
# Prerequisite: the target MySQL table must already exist with a schema
# matching the query's columns, otherwise the export fails.

# Abort on any failed step or unset variable, so a failed hive/sqoop run
# never falls through to the cleanup `hadoop fs -rm -r`.
set -eu

# MySQL JDBC connection URL (database: test)
jdbcUrl="jdbc:mysql://10.2.1.1:3306/test"
# Temp HDFS directory holding the exported rows
tempTabPath=/user/test

# NVL() replaces NULLs so the delimited text has no empty fields;
# timestamp columns get a fixed sentinel value instead of NULL.
# NOTE(review): column "endttime" looks like a typo for "endtime" —
# kept as-is because it must match the actual Hive/MySQL schema; confirm.
sql="select NVL(rowkey,'')     as rowkey,
            NVL(projid,'')     as projid,
            NVL(devid,'')      as devid,
            NVL(barcode,'')    as barcode,
            NVL(devaddr,'')    as devaddr,
            NVL(runmode_mb,'') as runmode_mb,
            NVL(starttime,TIMESTAMP('1971-01-01 00:30:00'))   as starttime,
            NVL(endttime,TIMESTAMP('1971-01-01 00:30:00'))    as endttime,
            NVL(receivetime,TIMESTAMP('1971-01-01 00:30:00')) as receivetime
     from test"

echo "++++++++++++++++++++開始導入數據:++++++++++++++++++++++++++++++++"

# Step 1: dump the Hive query result to HDFS as tab-delimited text.
hive -e "
use default;
insert overwrite directory '${tempTabPath}'
row format delimited fields terminated by '\t'
${sql};
"

# Step 2: push the HDFS files into MySQL.
# SECURITY NOTE(review): the password is visible on the command line
# (ps, shell history); prefer --password-file or -P in production.
# Was `-- m 5`: in Sqoop, `--` starts connector-specific args, so the
# mapper count was never applied — the correct spelling is `-m 5`.
sqoop export \
  --connect "${jdbcUrl}" \
  --username root \
  --password 1234qwer \
  --table speed_test \
  --export-dir "${tempTabPath}" \
  --input-fields-terminated-by '\t' \
  -m 5

echo "++++++++++++++++++++結束導入數據:++++++++++++++++++++++++++++++++"

# Step 3: remove the temp directory. ${tempTabPath:?} aborts if the
# variable is empty/unset, so we never recursively delete a wrong path.
hadoop fs -rm -r "${tempTabPath:?}"

 

執行腳本前,請先在 MySQL 中創建一個與 Hive 表結構相同的目標表(speed_test),否則導入數據時會報錯。

 
發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章