#!/bin/bash
# Program:
#   Uninstall Ambari (and the HDP stack) from every host in the cluster.
# History:
#   2014/01/13 - Ivan - 2862099249@qq.com - First release
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH

# Collect every cluster hostname from /etc/hosts, skipping the first two
# (localhost) lines. NOTE: each /etc/hosts entry must separate the IP and
# the hostname with exactly ONE space, otherwise cut(1) grabs the wrong field.
hostList=$(tail -n +3 /etc/hosts | cut -d ' ' -f 2)

yumReposDir=/etc/yum.repos.d/
alterNativesDir=/etc/alternatives/
pingCount=5   # number of ping probes used to decide a host is unreachable
logPre=TDP    # prefix for every log line emitted by this script
# BUG FIX: the original wrote "logPre=TDP read -p ..." on one line, which only
# exported logPre into read's environment and left it unset for the script.

# Ask which node runs ambari-server; default to "master" if nothing is typed.
read -p "Please input your master hostname: " master
master=${master:-"master"}

# Stop the ambari server on the master node before tearing anything down.
ssh "$master" "ambari-server stop"
# Reset (wipe) the ambari-server database on the master node.
# BUG FIX: the original merged this command and the "for" header onto one
# line, which is a bash syntax error.
ssh "$master" "ambari-server reset"

# HDP/Ambari stack packages to remove on every host (one yum transaction per
# package, preserving the original removal order).
hdpPackages=(
  sqoop.noarch lzo-devel.x86_64 hadoop-libhdfs.x86_64 rrdtool.x86_64
  hbase.noarch pig.noarch lzo.x86_64 ambari-log4j.noarch oozie.noarch
  oozie-client.noarch gweb.noarch snappy-devel.x86_64 hcatalog.noarch
  python-rrdtool.x86_64 nagios.x86_64 webhcat-tar-pig.noarch snappy.x86_64
  libconfuse.x86_64 webhcat-tar-hive.noarch ganglia-gmetad.x86_64
  extjs.noarch hive.noarch hadoop-lzo.x86_64 hadoop-lzo-native.x86_64
  hadoop-native.x86_64 hadoop-pipes.x86_64 nagios-plugins.x86_64
  hadoop.x86_64 zookeeper.noarch hadoop-sbin.x86_64 ganglia-gmond.x86_64
  libganglia.x86_64 perl-rrdtool.x86_64 epel-release.noarch
  'compat-readline5*' fping.x86_64 perl-Crypt-DES.x86_64 exim.x86_64
  ganglia-web.noarch perl-Digest-HMAC.noarch perl-Digest-SHA1.x86_64
  bigtop-jsvc.x86_64
)

# /etc/alternatives symlinks created by the HDP install.
alternativeLinks=(
  hadoop-etc zookeeper-conf hbase-conf hadoop-log hadoop-lib hadoop-default
  oozie-conf hcatalog-conf hive-conf hadoop-man sqoop-conf hadoop-confone
)

# Service accounts created by the HDP install.
hdpUsers=(
  nagios hive ambari-qa hbase oozie hcat mapred hdfs rrdcached zookeeper
  sqoop puppet flume tez yarn
)

# Data/config/log/tmp directories left behind by the stack.
hdpDirs=(
  /hadoop /etc/hadoop /etc/hbase /etc/hcatalog /etc/hive /etc/ganglia
  /etc/nagios /etc/oozie /etc/sqoop /etc/zookeeper
  /var/run/hadoop /var/run/hbase /var/run/hive /var/run/ganglia
  /var/run/nagios /var/run/oozie /var/run/zookeeper
  /var/log/hadoop /var/log/hbase /var/log/hive /var/log/nagios
  /var/log/oozie /var/log/zookeeper
  /usr/lib/hadoop /usr/lib/hbase /usr/lib/hcatalog /usr/lib/hive
  /usr/lib/oozie /usr/lib/sqoop /usr/lib/zookeeper
  /var/lib/hive /var/lib/ganglia /var/lib/oozie /var/lib/zookeeper
  /var/tmp/oozie
  /tmp/hive /tmp/nagios /tmp/ambari-qa /tmp/sqoop-ambari-qa
  /var/nagios
  /hadoop/oozie /hadoop/zookeeper /hadoop/mapred /hadoop/hdfs
  /tmp/hadoop-hive /tmp/hadoop-nagios /tmp/hadoop-hcat /tmp/hadoop-ambari-qa
  /tmp/hsperfdata_hbase /tmp/hsperfdata_hive /tmp/hsperfdata_nagios
  /tmp/hsperfdata_oozie /tmp/hsperfdata_zookeeper /tmp/hsperfdata_mapred
  /tmp/hsperfdata_hdfs /tmp/hsperfdata_hcat /tmp/hsperfdata_ambari-qa
)

# hostList is intentionally unquoted: it is a whitespace-separated list.
for host in $hostList; do
  # Reachability check: count "Unreachable" replies; if every probe failed,
  # skip the host and tell the operator to check /etc/hosts.
  unPing=$(ping "$host" -c "$pingCount" | grep -c 'Unreachable')
  if [ "$unPing" == "$pingCount" ]; then
    echo -e "$logPre======>$host is Unreachable,please check '/etc/hosts' file"
    continue
  fi
  # BUG FIX: the original merged this echo onto the "fi" line (syntax error)
  # and printed a literal "\n" because -e was missing.
  echo -e "$logPre======>$host deleting... \n"

  # 1) Remove the hdp.repo, HDP*.repo and ambari.repo yum repo files.
  #    (The original also ran "ssh $host 'cd $yumReposDir'", which was a no-op
  #    because each ssh invocation is its own session; the absolute paths below
  #    make it unnecessary.)
  ssh "$host" "rm -rf $yumReposDir/hdp.repo $yumReposDir/HDP* $yumReposDir/ambari.repo"

  # 2) Remove the HDP stack packages.
  for pkg in "${hdpPackages[@]}"; do
    ssh "$host" "yum remove -y $pkg"
  done

  # 3) Remove the /etc/alternatives symlinks.
  # BUG FIX: the original ran "cd $alterNativesDir" and "rm -rf <name>" in
  # SEPARATE ssh sessions, so the cd never took effect and the relative rm ran
  # in the remote user's home directory. Use absolute paths instead.
  for link in "${alternativeLinks[@]}"; do
    ssh "$host" "rm -rf $alterNativesDir/$link"
  done

  # 4) Delete the HDP service accounts (and their home dirs, forcibly).
  for user in "${hdpUsers[@]}"; do
    ssh "$host" "userdel -rf $user"
  done

  # 5) Delete leftover data/config/log/tmp directories.
  for dir in "${hdpDirs[@]}"; do
    ssh "$host" "rm -rf $dir"
  done

  # 6) Finally remove Ambari itself, its PostgreSQL backend and residue.
  ssh "$host" "yum remove -y ambari-*"
  ssh "$host" "yum remove -y postgresql"
  ssh "$host" "rm -rf /var/lib/ambari* /var/log/ambari* /etc/ambari*"
  # BUG FIX: the original appended this echo to the ssh command line above,
  # turning it into extra ssh arguments instead of a local log message.
  echo -e "$logPre======>$host is done! \n"
done
 

Ambari自动化卸载shell脚本的更多相关文章

  1. 自动化部署--shell脚本--1

    传统部署方式1.纯手工scp2.纯手工登录git pull .svn update3.纯手工xftp往上拉4.开发给打一个压缩包,rz上去.解压 传统部署缺点:1.全程运维参与,占用大量时间2.上线速 ...

  2. 自动化部署--shell脚本--2

    node1和node2都装apache   [root@linux-node1 ~]# yum install httpd -y Loaded plugins: fastestmirror Loadi ...

  3. 自动化部署--shell脚本--3

    登录gitlab第一次登录gitlab,需要为root用户修改密码,root用户也是gitlab的超级管理员.http://192.168.58.11设置密码是gitlab 密码不够长,重新设置下,设 ...

  4. LINUX系统自动化部署shell脚本

    #!/bin/sh; sudo /etc/init.d/tomcat stop; wait; sudo rm -rf /opt/tomcat7/work/*; wait; sudo rm -rf /opt/tomcat7/ ...

  5. linux服务器部署jar包以及shell脚本的书写

    背景:记录在linux环境下部署jar程序的过程 1 部署过程记录 1.1 程序结构 这里的main函数就在DemRest2.java 文件中. 为了部署方便,要做到以下两点: 1 在导出的jar包中 ...

  6. CentOS 下运维自动化 Shell 脚本之 expect

    CentOS 下运维自动化 Shell脚本之expect 一.预备知识: 1.在 Terminal 中反斜杠,即 "\" 代表转义符,或称逃脱符.("echo -e与pri ...

  7. fdisk分区硬盘并shell脚本自动化

    最近工作需要用到对硬盘进行shell脚本自动化分区和mount的操作,google了一些资料,下面做个总结. 如果硬盘没有进行分区(逻辑分区或者扩展分区,关于两者概念,自行google),我们将无法将 ...

  8. 通过shell脚本实现代码自动化部署

    通过shell脚本实现代码自动化部署 一.传统部署方式及优缺点 1.传统部署方式 (1)纯手工scp (2)纯手工登录git pull.svn update (3)纯手工xftp往上拉 (4)开发给打 ...

  9. Shell脚本,自动化发布tomcat项目【转载】

    Shell脚本,自动化发布tomcat项目脚本. 1. vko2c_auto_build_by_scp.sh 文件内容: #---------------------start------------ ...

随机推荐

  1. geohash-net实现

    基于c#语言 geohash算法基本实现源码,参见: https://github.com/sharonjl/geohash-net , 源码中具体包含如下方法: String CalculateAd ...

  2. javascript标识符

    标识符,就是指变量.函数.属性的名字,或者函数的参数. 规则 1.第一个字符必须是一个字母.下划线或是美元符号($) 2.其他字符可以是字母.下划线.美元符号或数字 3.不能是关键字和保留字 4.区分 ...

  3. AXIS最佳实践

    前言: Axis是apache一个开源的webservice服务,需要web容器进行发布.本节主要用于介绍使用Axis开发webservice,包括服务端的创建.webservice的部署.客户端的调 ...

  4. 在CentOS上安装并运行SparkR

    环境配置—— 操作系统:CentOS 6.5 JDK版本:1.7.0_67 Hadoop集群版本:CDH 5.3.0 安装过程—— 1.安装R yum install -y R 2.安装curl-de ...

  5. 在CentOS 6.x中支持exfat格式的U盘(移动硬盘)

    参考资料:http://linux.cn/article-1503-1.html CentOS系列一直没有默认支持使用exfat格式的大容量U盘(移动硬盘),但可以通过添加fuse-exfat模块来支 ...

  6. asp.net动态设置标题title 关键字keywords 描述descrtptions

    推荐的简单做法如下: protected void Page_Load(object sender, EventArgs e){//Page titlePage.Title = "This ...

  7. STM32固件库3.5+uCOS2.86移植(转自暴走的工程师)

    考了很多移植的资料和代码,终于移植好了...应该是完美移植吧~~哈哈哈~~ 编译环境是IAR 工程适用于STM32F10X大容量产品,如果不是,请自行修改启动文件和工程配置 编译器优化等级最高...这 ...

  8. [LeetCode] 10. Regular Expression Matching

    Implement regular expression matching with support for '.' and '*'. DP: public class Solution { publ ...

  9. Use Visual studio 2010 build Python2.7.10

    http://p-nand-q.com/python/building-python-27-with-vs2010.html

  10. java数据传递例子+内存分析

    一.引用传递 1.例子1 package com.jikexueyuan.ref; class Ref1{ int temp = 10; } public class RefDemo01 { publ ...