You are on page 1 of 5

############## Host file ##############

/etc/hosts
192.168.17.128 master
192.168.17.129 client1
192.168.17.130 client2

############## Keyless SSH ##############


master
-----------------------------------------------
# ssh-keygen -t rsa
# ssh-copy-id -i ~/.ssh/id_rsa.pub hduser@client1
# ssh-copy-id -i ~/.ssh/id_rsa.pub hduser@client2

client1
-----------------------------------------------
# ssh-keygen -t rsa
# ssh-copy-id -i ~/.ssh/id_rsa.pub hduser@master
# ssh-copy-id -i ~/.ssh/id_rsa.pub hduser@client2

client2
-----------------------------------------------
# ssh-keygen -t rsa
# ssh-copy-id -i ~/.ssh/id_rsa.pub hduser@master
# ssh-copy-id -i ~/.ssh/id_rsa.pub hduser@client1

############## Hadoop Download ##############


https://archive.apache.org/dist/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz

############## Hadoop Install ##############


# mv hadoop-2.7.3.tar.gz /home/hduser
# ls
# tar -xvf hadoop-2.7.3.tar.gz
# rm -fr hadoop-2.7.3.tar.gz
$ ln -s hadoop-2.7.3 hadoop ### Creating link
$ chown -R hduser:hduser hadoop-2.7.3
$ chmod 777 hadoop-2.7.3
$ cd /home/hduser/hadoop/
$ ls

$ su
# vi .bashrc

~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export HADOOP_HOME=$HOME/hadoop-2.7.3
export HADOOP_CONF_DIR=$HOME/hadoop-2.7.3/etc/hadoop
export HADOOP_MAPRED_HOME=$HOME/hadoop-2.7.3
export HADOOP_COMMON_HOME=$HOME/hadoop-2.7.3
export HADOOP_HDFS_HOME=$HOME/hadoop-2.7.3
export YARN_HOME=$HOME/hadoop-2.7.3
export PATH=$PATH:$HOME/hadoop-2.7.3/bin

export JAVA_HOME=/usr/java/jdk1.8.0_101
export PATH=/usr/java/jdk1.8.0_101/bin:$PATH
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# exit
$ cd ~
$ source .bashrc
$ hadoop version
$ java -version

$ cd /home/hduser/hadoop-2.7.3/etc/hadoop
vi slaves
~~~~~~~~~~~
client1
client2
~~~~~~~~~~~
vi core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://master:8020</value>
</property>
</configuration>

vi hdfs-site.xml ~~~~~~~~~~master
<configuration>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>/home/hduser/hadoop-data/namenode</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/home/hduser/hadoop-data/datanode</value>
</property>
</configuration>

vi hdfs-site.xml ~~~~~~~~~~client1 & client2

<configuration>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/home/hduser/hadoop-data/datanode</value>
</property>
</configuration>

cp mapred-site.xml.template mapred-site.xml
vi mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>

vi yarn-site.xml

<configuration>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>master:8025</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>master:8030</value>
</property>
<property>
<name>yarn.resourcemanager.address</name>
<value>master:8050</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.nodemanager.disk-health-checker.min-healthy-disks</name>
<value>0</value>
</property>
</configuration>

scp core-site.xml mapred-site.xml hdfs-site.xml yarn-site.xml client1:/home/hduser/hadoop/etc/hadoop
scp core-site.xml mapred-site.xml hdfs-site.xml yarn-site.xml client2:/home/hduser/hadoop/etc/hadoop

############## master ##############


mkdir -p /home/hduser/hadoop-data/namenode
cd ~
hdfs
hdfs namenode -format
~~~~~~~~~successful message: [main] common.Storage (NNStorage.java:format(568))
- Storage directory /home/hduser/hadoop-data/namenode has been successfully formatted.
(the path shown is whatever dfs.namenode.name.dir is set to in hdfs-site.xml)
hdfs getconf -namenodes
cd /home/hduser/hadoop/sbin
./start-dfs.sh
./hadoop-daemon.sh start datanode

jps
############## client1 & client2 ##############
mkdir -p /home/hduser/hadoop-data/datanode
chmod 755 /home/hduser/hadoop-data/datanode
jps
~~~~~~~~~~Manual start datanode if not started with namenode
cd /home/hduser/hadoop/sbin

Web Address: http://192.168.17.128:50070


http://192.168.17.128:8088

You might also like