Getting started with Hadoop: building a Hadoop cluster. If you want to learn Hadoop, setting up a cluster first is a good way to gradually understand and use it.
Hadoop nodes must reach each other by hostname; using raw IP addresses does not work.
vi /etc/hosts
# add the following entries to the hosts file
192.168.1.21 node21
192.168.1.22 node22
192.168.1.23 node23
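Every node will need these same entries (they are copied over near the end of this walkthrough). A minimal check that resolution works, assuming the hosts file above is in place:
# each hostname should resolve and answer
ping -c 1 node22
ping -c 1 node23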
ssh-keygen -t rsa   (press Enter at every prompt)
ssh-copy-id node21   (enter the password when prompted; the local node needs the key too)
ssh-copy-id 192.168.1.22   (enter the password when prompted)
ssh-copy-id 192.168.1.23   (enter the password when prompted)
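If the keys were copied correctly, each of these should print the remote hostname without asking for a password (a minimal check, assuming the hosts entries above):
ssh node22 hostname
ssh node23 hostname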
mkdir -p /opt/java
tar -xzvf jdk1.7.tar.gz -C /opt/java
ln -s /opt/java/jdk1.7XXXXX /opt/java/jdk   (substitute the actual extracted directory name for jdk1.7XXXXX)
Append the following to /etc/profile:
export JAVA_HOME=/opt/java/jdk
export PATH=$PATH:$JAVA_HOME/bin
export HADOOP_HOME=/home/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
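Reload the profile and confirm both tools are on the PATH (a quick check under the paths assumed above):
source /etc/profile
java -version      # should report the 1.7 JDK
hadoop version     # should report the Hadoop build under /home/hadoop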
Edit the following files under hadoop/etc/hadoop/, adding the configuration shown for each.
core-site.xml:
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://node21:9000</value>
    </property>
    <property>
        <name>io.file.buffer.size</name>
        <value>131072</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/home/hadoop/tmp</value>
    </property>
</configuration>
hdfs-site.xml:
<configuration>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/home/hadoop/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/home/hadoop/dfs/data</value>
    </property>
    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>node21:9001</value>
    </property>
    <property>
        <name>dfs.webhdfs.enabled</name>
        <value>true</value>
    </property>
</configuration>
yarn-site.xml:
<configuration>
    <property>
        <name>yarn.resourcemanager.address</name>
        <value>node21:8032</value>
    </property>
    <property>
        <name>yarn.resourcemanager.scheduler.address</name>
        <value>node21:8030</value>
    </property>
    <property>
        <name>yarn.resourcemanager.resource-tracker.address</name>
        <value>node21:8031</value>
    </property>
    <property>
        <name>yarn.resourcemanager.admin.address</name>
        <value>node21:8033</value>
    </property>
    <property>
        <name>yarn.resourcemanager.webapp.address</name>
        <value>node21:8088</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
        <value>org.apache.hadoop.mapred.ShuffleHandler</value>
    </property>
</configuration>
mapred-site.xml:
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>node21:10020</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>node21:19888</value>
    </property>
</configuration>
slaves (one worker hostname per line):
node22
node23
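With node21 fully configured, push the profile, the JDK, the Hadoop tree, and the hosts file to the other two nodes: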
scp /etc/profile 192.168.1.22:/etc
scp /etc/profile 192.168.1.23:/etc
scp -r /opt/java 192.168.1.22:/opt
scp -r /opt/java 192.168.1.23:/opt
scp -r /home/hadoop 192.168.1.22:/home
scp -r /home/hadoop 192.168.1.23:/home
scp /etc/hosts 192.168.1.22:/etc
scp /etc/hosts 192.168.1.23:/etc
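To confirm every node sees the same configuration, query an effective key on each of them (hdfs getconf is part of the stock Hadoop 2.x CLI; the ssh hop relies on the passwordless setup above):
for h in node21 node22 node23; do
    ssh $h "source /etc/profile; hdfs getconf -confKey fs.defaultFS"   # each should print hdfs://node21:9000
done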
hdfs namenode -format   (run once, on node21 only)
/home/hadoop/sbin/start-dfs.sh
/home/hadoop/sbin/start-yarn.sh
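If startup succeeded, jps (shipped with the JDK) should show the expected daemons on each node; with the layout above, roughly:
jps   # node21: NameNode, SecondaryNameNode, ResourceManager
      # node22/node23: DataNode, NodeManager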
YARN web UI: http://192.168.1.21:8088
HDFS web UI: http://192.168.1.21:50070
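As a final smoke test, run the example job bundled with the distribution (the wildcard stands in for the version suffix, which varies by release):
hadoop jar /home/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar pi 2 5   # estimate pi with 2 map tasks, 5 samples each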