【快捷部署】021_Hadoop(3.3.2)

#!/bin/bash
#################################################################################
# Author : cxy@toc 2024-04-17
# Purpose: quick deployment of Hadoop 3.3.2 (standalone) plus JDK 8u151
# Notes  : must run as root; installs under /${proj_base_name} and appends
#          environment exports to /etc/profile. Requires network access to
#          repo.huaweicloud.com.
#################################################################################

# Fail fast: abort on any command failure, including failures inside pipelines.
# (-u is intentionally NOT set: /etc/profile is sourced below and commonly
# references unset variables.)
set -e
set -o pipefail

# Print a timestamped, colored progress message to stdout.
info(){
  echo -e "\033[34m 【`date '+%Y-%m-%d %H:%M:%S'`\033[0m" "\033[35m$1\033[0m "
}

# Base directory name under / for all downloads and installs.
proj_base_name=cxy

# Download JDK 8u151, unpack it under /${proj_base_name}/jdk and register
# JAVA_HOME/JRE_HOME/CLASSPATH/PATH in /etc/profile.
install_jdk(){
  mkdir -p "/${proj_base_name}/bao"
  mkdir -p "/${proj_base_name}/jdk"
  cd "/${proj_base_name}/bao"
  wget https://repo.huaweicloud.com/java/jdk/8u151-b12/jdk-8u151-linux-x64.tar.gz
  tar xf "/${proj_base_name}/bao/jdk-8u151-linux-x64.tar.gz" -C "/${proj_base_name}/jdk"
  # \$ / \${...} are escaped so the *profile* expands them at login, not us.
  cat >> /etc/profile <<EOF
export JAVA_HOME=/${proj_base_name}/jdk/jdk1.8.0_151
export JRE_HOME=\${JAVA_HOME}/jre
export CLASSPATH=.:\${JAVA_HOME}/lib:\${JRE_HOME}/lib
export PATH=.:\${JAVA_HOME}/bin:\$PATH
EOF
  source /etc/profile
  info "JDK部署完毕 ..."
}

# Write the custom Hadoop configuration: hadoop-env.sh (JAVA_HOME),
# core-site.xml and hdfs-site.xml. Originals are kept as *.bak.
diy_custom_config(){
  cd "/${proj_base_name}/hadoop/hadoop-3.3.2"
  # Back up the files we are about to modify.
  cp etc/hadoop/hadoop-env.sh etc/hadoop/hadoop-env.sh.bak
  cp etc/hadoop/core-site.xml etc/hadoop/core-site.xml.bak
  cp etc/hadoop/hdfs-site.xml etc/hadoop/hdfs-site.xml.bak
  # hadoop-env.sh: insert an explicit JAVA_HOME after line 42 (next to the
  # commented-out template entry in the stock 3.3.2 file).
  sed -i "42aexport JAVA_HOME=/${proj_base_name}/jdk/jdk1.8.0_151" etc/hadoop/hadoop-env.sh
  # core-site.xml: '>' truncates and rewrites in one step (no rm/touch needed).
  cat > etc/hadoop/core-site.xml <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
  <!-- 数据存储目录 -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/${proj_base_name}/hadoop/data/tmp</value>
  </property>
  <property>
    <name>hadoop.http.staticuser.user</name>
    <value>root</value>
  </property>
</configuration>
EOF
  # hdfs-site.xml: single replica, explicit name/data dirs.
  cat > etc/hadoop/hdfs-site.xml <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/${proj_base_name}/hadoop/data/hdfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/${proj_base_name}/hadoop/data/hdfs/data</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
</configuration>
EOF
}

# Download and deploy Hadoop 3.3.2 (standalone), configure it, set up
# passwordless ssh for localhost, format HDFS and start the DFS daemons.
install_Hadoop(){
  mkdir -p "/${proj_base_name}/hadoop"
  cd "/${proj_base_name}/bao"
  wget https://repo.huaweicloud.com/apache/hadoop/common/hadoop-3.3.2/hadoop-3.3.2.tar.gz --no-check-certificate
  tar zxvf hadoop-3.3.2.tar.gz -C "/${proj_base_name}/hadoop"
  cat >> /etc/profile <<EOF
#Hadoop
export HADOOP_HOME=/${proj_base_name}/hadoop/hadoop-3.3.2
export PATH=\$PATH:\$HADOOP_HOME/bin
export PATH=\$PATH:\$HADOOP_HOME/sbin
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
EOF
  source /etc/profile
  hadoop version
  diy_custom_config
  # Passwordless ssh to localhost (required by start-dfs.sh). Guard the
  # keygen so a pre-existing key does not make ssh-keygen prompt or fail.
  mkdir -p ~/.ssh
  if [[ ! -f ~/.ssh/id_rsa ]]; then
    ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
  fi
  cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
  chmod 0600 ~/.ssh/authorized_keys
  # Use absolute paths: the original relied on the cd done inside
  # diy_custom_config, which breaks if that function ever changes.
  # Format the filesystem, then start the DFS daemons.
  "/${proj_base_name}/hadoop/hadoop-3.3.2/bin/hdfs" namenode -format
  "/${proj_base_name}/hadoop/hadoop-3.3.2/sbin/start-dfs.sh"
  info "安装完成,访问:http://$(curl ifconfig.me/ip):9870/"
}

install_jdk
install_Hadoop
上一篇:如何在TestNG中忽略测试用例


下一篇:深入剖析图像平滑与噪声滤波