本文共 2398 字,大约阅读时间需要 7 分钟。
# --- Passwordless SSH setup (run on node1, node2, node3, node4) ---

# Generate a DSA key pair with an empty passphrase.
# NOTE(review): DSA (ssh-dss) is disabled by default in modern OpenSSH;
# on current systems prefer: ssh-keygen -t ed25519
ssh-keygen -t dsa -P '' -f ~/.ssh/id_dsa
cd ~/.ssh
ls
# id_dsa     - private key
# id_dsa.pub - public key

# Append the public key to the local authorized_keys file
# (do this on node1, node2, node3 and node4).
cat ~/.ssh/id_dsa.pub >> ~/.ssh/authorized_keys

# Verify: logging in to node1 itself must not prompt for a password.
ssh 192.168.2.136
# "Last login: Sun Jul 9 14:14:35 2017 from 192.168.2.136" means success.
exit   # logout

# Copy node1's public key to node2, node3 and node4.
scp ./id_dsa.pub root@192.168.2.137:/opt/
scp ./id_dsa.pub root@192.168.2.138:/opt/
scp ./id_dsa.pub root@192.168.2.139:/opt/

# On node2, node3 and node4: append node1's key to the authorized list.
cat /opt/id_dsa.pub >> ~/.ssh/authorized_keys
# --- Install and configure Hadoop 2.5.1 on node1 ---

# Upload hadoop-2.5.1_x64.tar.gz to /root on node1, then unpack and relocate.
tar -zxvf hadoop-2.5.1_x64.tar.gz
mv hadoop-2.5.1 /home/
cd /home/
ls
cd hadoop-2.5.1
ls
cd etc/hadoop

# Config 1: hadoop-env.sh — point JAVA_HOME at the JDK install directory.
vi hadoop-env.sh
#   export JAVA_HOME=/usr/java/jdk1.7.0_79

# Config 2: core-site.xml — default filesystem URI and temp/data dir.
# NOTE(review): the original text listed these two properties under the
# hdfs-site.xml heading, but fs.defaultFS and hadoop.tmp.dir belong in
# core-site.xml — confirm against the Hadoop 2.x docs.
vi core-site.xml
#   <property><name>fs.defaultFS</name><value>hdfs://192.168.2.136:9000</value></property>
#   <property><name>hadoop.tmp.dir</name><value>/opt/hadoop-2.5</value></property>

# Config 3: hdfs-site.xml — Secondary NameNode HTTP/HTTPS addresses.
vi hdfs-site.xml
#   <property><name>dfs.namenode.secondary.http-address</name><value>192.168.2.137:50090</value></property>
#   <property><name>dfs.namenode.secondary.https-address</name><value>192.168.2.137:50091</value></property>

# Config 4: slaves — one DataNode host per line.
vi slaves
#   192.168.2.137
#   192.168.2.138
#   192.168.2.139

# Config 5: masters — Secondary NameNode host.
vi masters
#   192.168.2.137
# Distribute the configured Hadoop tree from node1 to the other nodes.
scp -r hadoop-2.5.1/ root@192.168.2.137:/home/
scp -r hadoop-2.5.1/ root@192.168.2.138:/home/
scp -r hadoop-2.5.1/ root@192.168.2.139:/home/
# Add Hadoop to the shell environment (append these lines in the editor).
vi ~/.bash_profile
#   export HADOOP_HOME=/home/hadoop-2.5.1
#   export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# Copy the updated profile to the other nodes so they share the same PATH.
scp ~/.bash_profile root@192.168.2.137:/root/
scp ~/.bash_profile root@192.168.2.138:/root/
scp ~/.bash_profile root@192.168.2.139:/root/
# Reload the profile so HADOOP_HOME/PATH take effect in the current shell.
source ~/.bash_profile

# Format the NameNode metadata (run ONCE, on node1 only — reformatting
# an existing cluster destroys the filesystem metadata).
hdfs namenode -format

# Verify the format result under hadoop.tmp.dir (fsimage files etc.).
cd /opt/hadoop-2.5/dfs/name/current
ls -l

# Start the HDFS daemons (NameNode, DataNodes, SecondaryNameNode).
# start-all.sh would additionally start YARN/MapReduce daemons.
start-dfs.sh

# Stop the firewall so the nodes and the web UI can reach each other.
# NOTE(review): this is temporary (lost on reboot); persist with
# 'chkconfig iptables off' on RHEL/CentOS 6, or open the needed ports.
service iptables stop

# NameNode web UI:
#   http://192.168.2.136:50070/dfshealth.html#tab-overview
转载地址:http://eveql.baihongyu.com/