Batch cleanup of an Ambari Hadoop cluster

1. Stop ambari-server (on the master) and ambari-agent (on every node)

ambari-server stop
ambari-agent stop
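Before moving on, you can confirm both daemons are actually down (the status subcommands ship with the standard ambari-server and ambari-agent packages):

ambari-server status
ambari-agent status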
2. Run the cleanup script

chmod +x ambari.sh

./ambari.sh

When it starts, the script prompts: Please input your master hostname......

Enter the master node's hostname (press Enter to accept the default, master).
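The script drives every host over SSH as root, so passwordless key-based SSH from the control node to all cluster hosts is a prerequisite. A minimal setup sketch, assuming you run as root and /etc/hosts has the layout the script expects:

ssh-keygen -t rsa                                # generate a key pair if none exists
for h in $(awk 'NR>2 {print $2}' /etc/hosts); do
    ssh-copy-id root@$h                          # push the public key to each host
done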

#!/bin/bash
# Program:
#    Uninstall the Ambari/HDP services
# History:
#    2017/11/10    -    Weiy    -
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Collect all cluster hostnames from /etc/hosts, skipping the two localhost
# lines; awk splits on any whitespace, so the IP/hostname separator can be
# one or more spaces or tabs
hostList=$(awk 'NR>2 {print $2}' /etc/hosts)
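# Example /etc/hosts layout the line above assumes (the first two lines are the
# IPv4/IPv6 localhost entries and are skipped; the names here are illustrative):
#   192.168.1.10 master
#   192.168.1.11 slave1
#   192.168.1.12 slave2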
yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5
logPre=TDP

read -p "Please input your master hostname: " master
master=${master:-"master"}
ssh $master "ambari-server stop"
# Reset the Ambari database
ssh $master "ambari-server reset"

for host in $hostList
do
    # Check the host's connectivity before cleaning it
    unPing=$(ping $host -c $pingCount | grep -c 'Unreachable')
    if [ "$unPing" == "$pingCount" ]; then
        echo -e "$logPre======>$host is unreachable, please check the '/etc/hosts' file"
        continue
    fi

    echo "$logPre======>$host deleting... \n"
    #1.)刪除hdp.repo、HDP.repo、HDP-UTILS.repo和ambari.repo
    ssh $host "cd $yumReposDir"
    ssh $host "rm -rf $yumReposDir/hdp.repo"
    ssh $host "rm -rf $yumReposDir/HDP*"
    ssh $host "rm -rf $yumReposDir/ambari.repo"
    
    # Remove the HDP-related packages
    ssh $host "yum remove -y  sqoop.noarch"
    ssh $host "yum remove -y  lzo-devel.x86_64"
    ssh $host "yum remove -y  hadoop-libhdfs.x86_64"
    ssh $host "yum remove -y  rrdtool.x86_64"
    ssh $host "yum remove -y  hbase.noarch"
    ssh $host "yum remove -y  pig.noarch"
    ssh $host "yum remove -y  lzo.x86_64" 
    ssh $host "yum remove -y  ambari-log4j.noarch"
    ssh $host "yum remove -y  oozie.noarch"
    ssh $host "yum remove -y  oozie-client.noarch"
    ssh $host "yum remove -y  gweb.noarch"
    ssh $host "yum remove -y  snappy-devel.x86_64"
    ssh $host "yum remove -y  hcatalog.noarch"
    ssh $host "yum remove -y  python-rrdtool.x86_64"
    ssh $host "yum remove -y  nagios.x86_64"
    ssh $host "yum remove -y  webhcat-tar-pig.noarch"
    ssh $host "yum remove -y  snappy.x86_64"
    ssh $host "yum remove -y  libconfuse.x86_64"
    ssh $host "yum remove -y  webhcat-tar-hive.noarch"
    ssh $host "yum remove -y  ganglia-gmetad.x86_64"
    ssh $host "yum remove -y  extjs.noarch"
    ssh $host "yum remove -y  hive.noarch"
    ssh $host "yum remove -y  hadoop-lzo.x86_64"
    ssh $host "yum remove -y  hadoop-lzo-native.x86_64"
    ssh $host "yum remove -y  hadoop-native.x86_64"
    ssh $host "yum remove -y  hadoop-pipes.x86_64"
    ssh $host "yum remove -y  nagios-plugins.x86_64"
    ssh $host "yum remove -y  hadoop.x86_64"
    ssh $host "yum remove -y  zookeeper.noarch"   
    ssh $host "yum remove -y  hadoop-sbin.x86_64"
    ssh $host "yum remove -y  ganglia-gmond.x86_64"
    ssh $host "yum remove -y  libganglia.x86_64"
    ssh $host "yum remove -y  perl-rrdtool.x86_64"
    ssh $host "yum remove -y  epel-release.noarch"
    ssh $host "yum remove -y  compat-readline5*"
    ssh $host "yum remove -y  fping.x86_64"
    ssh $host "yum remove -y  perl-Crypt-DES.x86_64"
    ssh $host "yum remove -y  exim.x86_64"
    ssh $host "yum remove -y ganglia-web.noarch"
    ssh $host "yum remove -y perl-Digest-HMAC.noarch"
    ssh $host "yum remove -y perl-Digest-SHA1.x86_64"
    ssh $host "yum remove -y bigtop-jsvc.x86_64"
    
    # Remove the alternatives entries (each ssh call starts a fresh shell, so a
    # prior 'cd' would not persist; use absolute paths instead)
    ssh $host "rm -rf $alterNativesDir/hadoop-etc"
    ssh $host "rm -rf $alterNativesDir/zookeeper-conf"
    ssh $host "rm -rf $alterNativesDir/hbase-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-log"
    ssh $host "rm -rf $alterNativesDir/hadoop-lib"
    ssh $host "rm -rf $alterNativesDir/hadoop-default"
    ssh $host "rm -rf $alterNativesDir/oozie-conf"
    ssh $host "rm -rf $alterNativesDir/hcatalog-conf"
    ssh $host "rm -rf $alterNativesDir/hive-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-man"
    ssh $host "rm -rf $alterNativesDir/sqoop-conf"
    ssh $host "rm -rf $alterNativesDir/hadoop-confone"

    # Remove the service users
    ssh $host "userdel -rf nagios"
    ssh $host "userdel -rf hive"
    ssh $host "userdel -rf ambari-qa"
    ssh $host "userdel -rf hbase"
    ssh $host "userdel -rf oozie"
    ssh $host "userdel -rf hcat"
    ssh $host "userdel -rf mapred"
    ssh $host "userdel -rf hdfs"
    ssh $host "userdel -rf rrdcached"
    ssh $host "userdel -rf zookeeper"
    ssh $host "userdel -rf sqoop"
    ssh $host "userdel -rf puppet"
    ssh $host "userdel -rf flume"
    ssh $host "userdel -rf tez"
    ssh $host "userdel -rf yarn"
	ssh $host "userdel -rf knox" 
	ssh $host "userdel -rf storm" 
	ssh $host "userdel -rf kafka"
	ssh $host "userdel -rf falcon"
	ssh $host "userdel -rf atlas"
	ssh $host "userdel -rf spark"

    # Remove leftover directories
    ssh $host "rm -rf /hadoop"
    ssh $host "rm -rf /etc/hadoop" 
    ssh $host "rm -rf /etc/hbase"
    ssh $host "rm -rf /etc/hcatalog" 
    ssh $host "rm -rf /etc/hive"
    ssh $host "rm -rf /etc/ganglia" 
    ssh $host "rm -rf /etc/nagios"
    ssh $host "rm -rf /etc/oozie"
    ssh $host "rm -rf /etc/sqoop"
    ssh $host "rm -rf /etc/zookeeper" 
	ssh $host "rm -rf /etc/flume"
    ssh $host "rm -rf /etc/storm" 
    ssh $host "rm -rf /etc/hive-hcatalog"
    ssh $host "rm -rf /etc/tez" 
    ssh $host "rm -rf /etc/falcon" 
    ssh $host "rm -rf /etc/knox" 
    ssh $host "rm -rf /etc/hive-webhcat"
    ssh $host "rm -rf /etc/kafka" 
    ssh $host "rm -rf /etc/slider" 
    ssh $host "rm -rf /etc/storm-slider-client"
    ssh $host "rm -rf /etc/spark"
	ssh $host "rm -rf /var/hadoop"
    ssh $host "rm -rf /var/run/hadoop" 
    ssh $host "rm -rf /var/run/hbase"
    ssh $host "rm -rf /var/run/hive"
    ssh $host "rm -rf /var/run/ganglia" 
    ssh $host "rm -rf /var/run/nagios"
    ssh $host "rm -rf /var/run/oozie"
    ssh $host "rm -rf /var/run/zookeeper"
    ssh $host "rm -rf /var/run/spark"
    ssh $host "rm -rf /var/run/flume"
    ssh $host "rm -rf /var/run/storm"
    ssh $host "rm -rf /var/run/webhcat"
    ssh $host "rm -rf /var/run/hadoop-yarn"
    ssh $host "rm -rf /var/run/hadoop-mapreduce"
    ssh $host "rm -rf /var/run/kafka"
    ssh $host "rm -rf /var/log/hadoop"
    ssh $host "rm -rf /var/log/hbase"
    ssh $host "rm -rf /var/log/hive"
    ssh $host "rm -rf /var/log/nagios" 
    ssh $host "rm -rf /var/log/oozie"
    ssh $host "rm -rf /var/log/zookeeper" 
    ssh $host "rm -rf /var/log/flume" 
    ssh $host "rm -rf /var/log/storm" 
    ssh $host "rm -rf /var/log/hadoop-yarn" 
    ssh $host "rm -rf /var/log/hadoop-mapreduce" 
    ssh $host "rm -rf /var/log/knox"  
    ssh $host "rm -rf /var/log/falcon" 
    ssh $host "rm -rf /var/log/webhcat" 
    ssh $host "rm -rf /var/log/spark" 
    ssh $host "rm -rf /usr/lib/hadoop"
    ssh $host "rm -rf /usr/lib/hbase"
    ssh $host "rm -rf /usr/lib/hcatalog" 
    ssh $host "rm -rf /usr/lib/hive"
    ssh $host "rm -rf /usr/lib/oozie"
    ssh $host "rm -rf /usr/lib/sqoop"
    ssh $host "rm -rf /usr/lib/zookeeper" 
	ssh $host "rm -rf /usr/lib/flume"
    ssh $host "rm -rf /usr/lib/storm"
    ssh $host "rm -rf /var/lib/hive"
    ssh $host "rm -rf /var/lib/ganglia" 
    ssh $host "rm -rf /var/lib/oozie"
    ssh $host "rm -rf /var/lib/zookeeper"
    ssh $host "rm -rf /var/tmp/oozie"
	ssh $host "rm -rf /var/lib/flume"
	ssh $host "rm -rf /var/lib/hadoop-hdfs"
	ssh $host "rm -rf /var/lib/knox" 
    ssh $host "rm -rf /var/tmp/oozie"
    ssh $host "rm -rf /tmp/hive"
    ssh $host "rm -rf /tmp/nagios" 
    ssh $host "rm -rf /tmp/ambari-qa" 
    ssh $host "rm -rf /tmp/sqoop-ambari-qa"
    ssh $host "rm -rf /var/nagios"
    ssh $host "rm -rf /hadoop/oozie"
    ssh $host "rm -rf /hadoop/zookeeper"
    ssh $host "rm -rf /hadoop/mapred"
    ssh $host "rm -rf /hadoop/hdfs"
    ssh $host "rm -rf /tmp/hadoop-hive" 
    ssh $host "rm -rf /tmp/hadoop-nagios" 
    ssh $host "rm -rf /tmp/hadoop-hcat"
	ssh $host "rm -rf /tmp/hadoop-hdfs"
    ssh $host "rm -rf /tmp/hadoop-ambari-qa" 
    ssh $host "rm -rf /tmp/hadoop" 
    ssh $host "rm -rf /tmp/hsperfdata_hbase"
    ssh $host "rm -rf /tmp/hsperfdata_hive"
    ssh $host "rm -rf /tmp/hsperfdata_nagios"
    ssh $host "rm -rf /tmp/hsperfdata_oozie"
    ssh $host "rm -rf /tmp/hsperfdata_zookeeper"
    ssh $host "rm -rf /tmp/hsperfdata_mapred"
    ssh $host "rm -rf /tmp/hsperfdata_hdfs"
    ssh $host "rm -rf /tmp/hsperfdata_hcat"
    ssh $host "rm -rf /tmp/hsperfdata_ambari-qa"
	ssh $host "rm -rf /var/lib/pgsql"
	ssh $host "rm -rf /home/atlas"
    ssh $host "rm -rf /home/accumulo"
    ssh $host "rm -rf /home/hbase"
    ssh $host "rm -rf /home/hive"
    ssh $host "rm -rf /home/oozie"
    ssh $host "rm -rf /home/storm"
    ssh $host "rm -rf /home/yarn"
    ssh $host "rm -rf /home/ambari-qa"
    ssh $host "rm -rf /home/falcon"
    ssh $host "rm -rf /home/hcat"
    ssh $host "rm -rf /home/kafka"
    ssh $host "rm -rf /home/mahout"
    ssh $host "rm -rf /home/spark"
    ssh $host "rm -rf /home/tez"
    ssh $host "rm -rf /home/zookeeper"
    ssh $host "rm -rf /home/flume"
    ssh $host "rm -rf /home/hdfs"
    ssh $host "rm -rf /home/knox"
    ssh $host "rm -rf /home/mapred"
    ssh $host "rm -rf /home/sqoop"
    ssh $host "rm -rf /tmp/ambari-qa"
    ssh $host "rm -rf /usr/hadoop"
    ssh $host "rm -rf /opt/hadoop"

    # Remove the Ambari packages themselves
    ssh $host "yum remove -y ambari-*"
    ssh $host "yum remove -y postgresql"
    ssh $host "rm -rf /var/lib/ambari*"
    ssh $host "rm -rf /var/log/ambari*"
	ssh $host "rm -rf /usr/lib/python2.6/site-packages/ambari_*"
	ssh $host "rm -rf /usr/lib/python2.6/site-packages/resource_management"
	ssh $host "rm -rf /usr/lib/ambari-*"
    ssh $host "rm -rf /etc/ambari*"
	
    # Clean the yum caches
    ssh $host "yum clean all"

    echo "$logPre======>$host is done! \n"
done

3. Verify the uninstall is clean

yum list | grep @HDP

If any packages are still listed, remove them manually:

yum remove -y xx
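To sweep up any stragglers in one pass, a loop along these lines should work (a sketch; @HDP matches the repository column yum prints for packages installed from the HDP repos):

for pkg in $(yum list installed | grep @HDP | awk '{print $1}'); do
    yum remove -y $pkg
done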
