Install Hadoop Cluster with Docker


name@host:/#wget https://raw.githubusercontent.com/weaveworks/weave/master/weave
name@host:/#sudo mv weave /usr/local/bin/
name@host:/#sudo chmod +x /usr/local/bin/weave
name@host:/#weave launch
name@host:/#eval $(weave env)
name@host:/#docker run --name=hnode1 --hostname=hnode1.weave.local -it ubuntu /bin/bash
name@host:/#eval $(weave env)
name@host:/#docker run --name=hnode2 --hostname=hnode2.weave.local -it ubuntu /bin/bash

root@hnode1:~#passwd
Enter new UNIX password:root
root@hnode1:~#sudo apt-get update
root@hnode1:~#apt-get install openssh-server vim rsync
root@hnode1:~#sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config
root@hnode1:~#/etc/init.d/ssh start
root@hnode1:~#ssh-keygen

root@hnode2:~#passwd
Enter new UNIX password:root
root@hnode2:~#sudo apt-get update
root@hnode2:~#apt-get install openssh-server vim rsync
root@hnode2:~#sed -i 's/PermitRootLogin without-password/PermitRootLogin yes/' /etc/ssh/sshd_config
root@hnode2:~#/etc/init.d/ssh start
root@hnode2:~#ssh-keygen

root@hnode1:/opt# cat /root/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
root@hnode1:/opt# chmod 700 ~/.ssh
root@hnode1:/opt# chmod 600 ~/.ssh/authorized_keys
root@hnode1:~#scp /root/.ssh/id_rsa.pub root@hnode2:/tmp

root@hnode2:~#scp /root/.ssh/id_rsa.pub root@hnode1:/tmp
root@hnode1:~#cat /tmp/id_rsa.pub >> ~/.ssh/authorized_keys

root@hnode2:/opt# cat /root/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
root@hnode2:~#cat /tmp/id_rsa.pub >> ~/.ssh/authorized_keys
root@hnode2:~#chmod 700 ~/.ssh
root@hnode2:~#chmod 600 ~/.ssh/authorized_keys

root@hnode1:/# wget http://apache.openmirror.de/hadoop/common/hadoop-2.6.2/hadoop-2.6.2.tar.gz
root@hnode1:/#tar -xvf hadoop-2.6.2.tar.gz
root@hnode1:/#mv hadoop-2.6.2 /opt/

root@hnode1:/# vim /opt/hadoop-2.6.2/etc/hadoop/core-site.xml

<property>
<name>fs.defaultFS</name>
<value>hdfs://hnode1.weave.local/</value>
</property>

root@hnode1:/# vim /opt/hadoop-2.6.2/etc/hadoop/hdfs-site.xml

<property>
<name>dfs.namenode.name.dir</name>
<value>/disk1/hdfs/name,/remote/hdfs/name</value>
</property>

root@hnode1:/# vim /opt/hadoop-2.6.2/etc/hadoop/yarn-site.xml

<property>
<name>yarn.resourcemanager.hostname</name>
<value>hnode1.weave.local</value>
</property>
<property>
<name>yarn.nodemanager.local-dirs</name>
<value>/disk1/nm-local-dir,/disk2/nm-local-dir</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.resource.memory-mb</name>
<value>16384</value>
</property>
<property>
<name>yarn.nodemanager.resource.cpu-vcores</name>
<value>16</value>
</property>

root@hnode1:/# mkdir -p /disk1/hdfs/name /remote/hdfs/name /disk1/nm-local-dir /disk2/nm-local-dir

root@hnode2:/# wget http://apache.openmirror.de/hadoop/common/hadoop-2.6.2/hadoop-2.6.2.tar.gz
root@hnode2:/#tar -xvf hadoop-2.6.2.tar.gz
root@hnode2:/#mv hadoop-2.6.2 /opt/

root@hnode2:/# vim /opt/hadoop-2.6.2/etc/hadoop/core-site.xml

<property>
<name>fs.defaultFS</name>
<value>hdfs://hnode1.weave.local/</value>
</property>

root@hnode2:/# vim /opt/hadoop-2.6.2/etc/hadoop/hdfs-site.xml

<property>
<name>dfs.datanode.data.dir</name>
<value>/disk1/hdfs/data,/disk2/hdfs/data</value></property>

root@hnode2:/# mkdir -p /disk1/hdfs/data /disk2/hdfs/data

name@host:/#HN1_IP=$(docker inspect -f '{{ .NetworkSettings.IPAddress }}' hnode1)

name@host:/#HN2_IP=$(docker inspect -f '{{ .NetworkSettings.IPAddress }}' hnode2)

name@host:/#scp ~/Downloads/jdk-8u65-linux-x64.tar.gz root@$HN1_IP:/opt

name@host:/#scp ~/Downloads/jdk-8u65-linux-x64.tar.gz root@$HN2_IP:/opt

root@hnode1:/opt# tar -xvf jdk-8u65-linux-x64.tar.gz
root@hnode1:~# echo "JAVA_HOME=/opt/jdk1.8.0_65" >> /etc/environment
root@hnode1:~# echo "PATH=/opt/jdk1.8.0_65/bin:$PATH" >> /etc/environment
root@hnode1:~#source /etc/environment
root@hnode1:~# export JAVA_HOME=/opt/jdk1.8.0_65

root@hnode2:/opt# tar -xvf jdk-8u65-linux-x64.tar.gz
root@hnode2:~# echo "JAVA_HOME=/opt/jdk1.8.0_65" >> /etc/environment
root@hnode2:~# echo "PATH=/opt/jdk1.8.0_65/bin:$PATH" >> /etc/environment
root@hnode2:~#source /etc/environment
root@hnode2:~# export JAVA_HOME=/opt/jdk1.8.0_65

root@hnode1:/# /opt/hadoop-2.6.2/bin/hdfs namenode -format -clusterId mycluster

root@hnode1:~# /opt/hadoop-2.6.2/sbin/start-dfs.sh
root@hnode2:~# /opt/hadoop-2.6.2/sbin/hadoop-daemon.sh start datanode

Advertisements

Leave a Reply

Fill in your details below or click an icon to log in:

WordPress.com Logo

You are commenting using your WordPress.com account. Log Out / Change )

Twitter picture

You are commenting using your Twitter account. Log Out / Change )

Facebook photo

You are commenting using your Facebook account. Log Out / Change )

Google+ photo

You are commenting using your Google+ account. Log Out / Change )

Connecting to %s