hadoop 集群下

分享 123456789987654321 ⋅ 于 2020-07-18 14:14:40 ⋅ 2156 阅读

配置5台虚拟机

s3

#虚拟机名称
[root@nn1 ~]# hostnamectl set-hostname s3.hadoop
#修改虚拟机ip
192.168.92.34

ssh(集群之间免密登录)

[hadoop@nn1 ~]$ mkdir sshkey
[hadoop@nn1 ~]$ ssh-keygen -t rsa

[hadoop@nn1 ~]$ cd .ssh/
[hadoop@nn1 .ssh]$ cp id_rsa.pub ~/sshkey/nn1
#切换到nn2
[hadoop@nn1 .ssh]$ ssh nn2.hadoop
[hadoop@nn2 ~]$ ssh-keygen -t rsa
[hadoop@nn2 .ssh]$ scp id_rsa.pub hadoop@nn1.hadoop:~/sshkey/nn2
ctrl d 退出
#创建文件夹,追加公钥
[hadoop@nn1 sshkey]$ touch authorized_keys
[hadoop@nn1 sshkey]$ chmod 600 authorized_keys 
[hadoop@nn1 sshkey]$ cat nn1 >> authorized_keys 
[hadoop@nn1 sshkey]$ cat nn2 >> authorized_keys 
[hadoop@nn1 sshkey]$ cat s1 >> authorized_keys 
[hadoop@nn1 sshkey]$ cat s2 >> authorized_keys 
[hadoop@nn1 sshkey]$ cat s3 >> authorized_keys 
#五台机器密钥分发
[hadoop@nn1 sshkey]$ scp authorized_keys hadoop@nn1.hadoop:~/.ssh
[hadoop@nn1 sshkey]$ scp authorized_keys hadoop@nn2.hadoop:~/.ssh
...

#切换用户
[hadoop@s2 ~]$ ssh nn1.hadoop
#批量脚本
[hadoop@nn1 ~]$ unzip hadoop_base_op.zip 
#权限
[hadoop@nn1 ~]$ cd hadoop_base_op
[hadoop@nn1 hadoop_base_op]$ chmod 777 ./*
#修改用户名
[hadoop@nn1 hadoop_base_op]$ vim ssh_root.sh

zookeeper安装

#分发zookeeper
[hadoop@nn1 zookeeper_base_op]$ sh scp_all.sh /tmp/upload/zookeeper-3.4.8.tar.gz  /tmp/
#解压
[hadoop@nn1 zookeeper_base_op]$ sh ssh_root.sh tar -zxf /tmp/zookeeper-3.4.8.tar.gz -C /usr/local/
#权限
[hadoop@nn1 zookeeper_base_op]$ sh ssh_root.sh chown -R hadoop:hadoop /usr/local/zookeeper-3.4.8/
#配置软链接
[hadoop@nn1 zookeeper_base_op]$ sh ssh_root.sh ln -s /usr/local/zookeeper-3.4.8/ /usr/local/zookeeper 
#权限
[hadoop@nn1 zookeeper_base_op]$ sh ssh_root.sh chown -h hadoop:hadoop /usr/local/zookeeper

[hadoop@nn1 zookeeper_base_op]$ sh ssh_root.sh chmod 770 /usr/local/zookeeper-3.4.8/

配置文件

[hadoop@nn1 zookeeper_base_op]$ cd /usr/local/zookeeper/conf/
[hadoop@nn1 conf]$ sh ~/zookeeper_base_op/ssh_all.sh rm -rf /usr/local/zookeeper/conf/zoo_sample.cfg

[hadoop@nn1 conf]$ sh ~/zookeeper_base_op/scp_all.sh /tmp/upload/zoo.cfg /usr/local/zookeeper/conf/
#创建data目录
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh mkdir /data
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh chown -R hadoop:hadoop /data

#修改输出日志配置文件所在目录
[hadoop@nn1 hadoop_base_op]$ vim /usr/local/zookeeper/bin/zkEnv.sh   
ZOO_LOG_DIR=/data
#分发日志配置文件(分发文件需用 scp_all.sh,而非 ssh_all.sh)
[hadoop@nn1 zookeeper_base_op]$ sh scp_all.sh /usr/local/zookeeper/bin/zkEnv.sh /usr/local/zookeeper/bin/
#
[hadoop@nn1 data]$ touch myid && echo 1 > myid && cat myid
1
[hadoop@nn1 data]$ ssh nn2.hadoop
Last login: Sat Jul 11 15:16:53 2020 from nn1.hadoop
[hadoop@nn2 ~]$ cd /data
[hadoop@nn2 data]$ touch myid && echo 2 > myid && cat myid 
2
[hadoop@nn2 data]$ ssh s1.hadoop
Last login: Sat Jul 11 15:16:53 2020 from nn1.hadoop
[hadoop@s1 ~]$ cd /data
[hadoop@s1 data]$ touch myid && echo 3 > myid && cat myid
3

配置环境变量

[hadoop@nn1 ~]$ su root
[root@nn1 hadoop]# vim /etc/profile

#set Hadoop Path
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_COMMON_HOME=${HADOOP_HOME}
export HADOOP_HDFS_HOME=${HADOOP_HOME}
export HADOOP_MAPRED_HOME=${HADOOP_HOME}
export HADOOP_YARN_HOME=${HADOOP_HOME}
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export HDFS_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export YARN_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:/usr/lib64

export HBASE_HOME=/usr/local/hbase
export HIVE_HOME=/usr/local/hive
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HBASE_HOME/bin:$HIVE_HOME/bin:/usr/local/zookeeper/bin
#拷贝
[hadoop@nn1 etc]$ cp ./profile /tmp
[hadoop@nn1 etc]$ cd ~/hadoop_base_op/
[hadoop@nn1 hadoop_base_op]$ sh scp_all.sh /tmp/profile /tmp
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh cp /tmp/profile /etc/
[hadoop@nn1 hadoop_base_op]$ sh ssh_all.sh source /etc/profile

开启zookeeper

[hadoop@nn1 zookeeper_base_op]$ sh ssh_all.sh /usr/local/zookeeper/bin/zkServer.sh start 

hadoop安装

#分发
[hadoop@nn1 hadoop_base_op]$ sh scp_all.sh /tmp/upload/hadoop-2.7.3.tar.gz /tmp 
#解压
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh tar -zxf /tmp/hadoop-2.7.3.tar.gz -C /usr/local/
#权限
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh chown -R hadoop:hadoop /usr/local/hadoop-2.7.3/
#软链接
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh ln -s /usr/local/hadoop-2.7.3/ /usr/local/hadoop
#770
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh chmod -R 770 /usr/local/hadoop-2.7.3/
#归属用户hadoop
[hadoop@nn1 hadoop_base_op]$ sh ssh_root.sh chown -h hadoop:hadoop /usr/local/hadoop
#检查本地库支持情况
[hadoop@s3 ~]$ hadoop checknative
版权声明:原创作品,允许转载,转载时务必以超链接的形式表明出处和作者信息。否则将追究法律责任。来自海汼部落-123456789987654321,http://hainiubl.com/topics/75218
回复数量: 0
    暂无评论~~
    • 请注意单词拼写,以及中英文排版,参考此页
    • 支持 Markdown 格式, **粗体**、~~删除线~~、`单行代码`, 更多语法请见这里 Markdown 语法
    • 支持表情,可用Emoji的自动补全, 在输入的时候只需要 ":" 就可以自动提示了 :metal: :point_right: 表情列表 :star: :sparkles:
    • 上传图片, 支持拖拽和剪切板黏贴上传, 格式限制 - jpg, png, gif,教程
    • 发布框支持本地存储功能,会在内容变更时保存,「提交」按钮点击时清空
    Ctrl+Enter