Professional Documents
Culture Documents
https://www.oracle.com/in/java/technologies/javase/javase-jdk8-downloads.html
https://archive.apache.org/dist/hadoop/common/hadoop-2.6.0/hadoop-2.6.0.tar.gz
==============================================================
$ sudo apt-get update
$ ifconfig
eth1
inet 192.168.56.101 — this line shows the IP address of the machine; note it
down, as it is needed in the later steps.
auto lo
iface lo inet loopback
auto eth1
Change the hostname by editing the file in nano, then save and exit with
Ctrl+X, then Y, then Enter.
$ ssh-keygen
$ ssh localhost
$ ssh-copy-id -i localhost
check and run
$ ssh localhost
$ ssh-copy-id -i masternode
$ ssh masternode
==========================================================
$ cd Downloads
hadoop@masternode:~/Downloads $
export HADOOP_PREFIX="/opt/hadoop-2.6.0/"
export PATH=$PATH:$HADOOP_PREFIX/bin
export PATH=$PATH:$HADOOP_PREFIX/sbin
export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
export HADOOP_MAPRED_HOME=${HADOOP_PREFIX}
export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
export YARN_HOME=${HADOOP_PREFIX}
export JAVA_HOME="/usr/lib/jvm/jdk1.8.0_251/"
export PATH=$PATH:$JAVA_HOME/bin
now move to hadoop/etc/hadoop folder
hadoop-env.sh
core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:8020</value>
<final>true</final>
</property>
</configuration>
hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.namenode.name.dir</name>
<value>file:///opt/hadoop-2.6.0/hadoop_data/dfs/name</value>
</property>
<property>
<name>dfs.blocksize</name>
<value>268435456</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:///opt/hadoop-2.6.0/hadoop_data/dfs/data</value>
</property>
</configuration>
mapred-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
yarn-site.xml
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
$ execstack -c /opt/hadoop-2.6.0/lib/native/libhdfs.so.0.0.0
$ hdfs namenode -format (this command should be run only once, the first time;
otherwise it will erase all data in the NameNode)
$ start-dfs.sh or $ start-all.sh
$ start-yarn.sh
To fix this warning, update your hadoop-env.sh file under etc/hadoop, and
replace the following line