install_hadoop.sh — 149 lines (107 loc) · 3.92 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
#!/bin/bash
# install_hadoop.sh — install Hadoop 2.7.3 on a fresh CentOS 7 Docker instance.
# Must be run as root: it writes under /opt and appends to /etc/bashrc.
#
# -e: abort on the first failing command (a failed download must not be
#     followed by extraction/configuration of nothing).
# -o pipefail: a pipeline fails if any stage fails.
# (-u is deliberately omitted: JAVA_HOME may legitimately be unset here.)
set -eo pipefail

echo "This script will install hadoop on a fresh docker instance of centos 7"

if [[ $EUID -ne 0 ]]; then
  # Diagnostics go to stderr, not stdout.
  echo "You must be a root user to use this script" >&2
  exit 1
fi

# Fetch and run the passwordless-SSH setup helper.
# -f: fail with non-zero status on an HTTP error instead of saving the
#     error page and then executing it as a shell script.
# -sS: quiet, but still print errors.  -L: follow redirects.
curl -fsSL https://raw.githubusercontent.com/abhishekunotech/install_scripts/master/installpasswordless.sh > installpass.sh
chmod +x installpass.sh
./installpass.sh
echo "We are inside install hadoop"
# Diagnostic only: show what installpass.sh (if anything) set.
# ${JAVA_HOME:-} keeps this safe even when the variable is unset.
echo "JAVA_HOME=${JAVA_HOME:-}"

cd /opt
# -p: succeed if the directory already exists.  The original
# `mkdir hadoop && cd hadoop` silently skipped the cd on re-run and
# unpacked everything into /opt instead.
mkdir -p hadoop
cd hadoop

# The redrockdigimark.com mirror is dead; archive.apache.org permanently
# hosts every released Hadoop version.
wget -q https://archive.apache.org/dist/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz
tar -zxf hadoop-2.7.3.tar.gz
# Hadoop environment for this shell.
# BUG FIX: JAVA_HOME must be the JDK *root*, not its bin/ subdirectory —
# Hadoop's scripts exec "$JAVA_HOME/bin/java", so .../bin made that
# .../bin/bin/java and every daemon failed to start.
export JAVA_HOME=/opt/jdk1.8.0_111
export HADOOP_INSTALL=/opt/hadoop/hadoop-2.7.3
export HADOOP_PREFIX=$HADOOP_INSTALL
export HADOOP_HOME=$HADOOP_INSTALL
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin:$PATH

# Persist the same environment for all future shells.
# The quoted 'EOF' delimiter writes these lines *literally*, so
# /etc/bashrc keeps variable references instead of baking in this
# shell's fully-expanded $PATH (which the old echo lines did).
cat <<'EOF' >> /etc/bashrc
export JAVA_HOME=/opt/jdk1.8.0_111
export HADOOP_INSTALL=/opt/hadoop/hadoop-2.7.3
export HADOOP_PREFIX=/opt/hadoop/hadoop-2.7.3
export HADOOP_HOME=/opt/hadoop/hadoop-2.7.3
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin:$PATH
EOF
source /etc/bashrc
# Sanity check: the hadoop binary is on PATH and runnable.
hadoop version

# All Hadoop XML configuration lives under etc/hadoop.
# Quote the expansion and abort if the cd fails — otherwise the sed/cat
# below would mangle files in whatever directory we happened to be in.
cd "$HADOOP_HOME/etc/hadoop" || exit 1

# Strategy for each site file: strip the empty <configuration></configuration>
# pair the distribution ships with, then append a populated block.
sed -i -r "s/<configuration>//gi" core-site.xml
sed -i -r "s/<\/configuration>//gi" core-site.xml
cat <<EOF >>core-site.xml
<configuration>
<property>
<name>fs.default.name</name>
<value>hdfs://localhost:54310</value>
<description>The name of the default file system. A URI whose
scheme and authority determine the FileSystem implementation. The
uri's scheme determines the config property (fs.SCHEME.impl) naming
the FileSystem implementation class. The uri's authority is used to
determine the host, port, etc. for a filesystem.</description>
</property>
</configuration>
EOF
# hdfs-site.xml: single-node setup — replication factor 1, explicit
# namenode/datanode storage dirs, and relaxed DN registration checks
# (Docker hostnames rarely resolve cleanly).
# Create the storage directories up front so the daemons don't fail on
# a missing path.
mkdir -p /opt/hadoop/namenode /opt/hadoop/datanode
sed -i -r "s/<configuration>//gi" hdfs-site.xml
sed -i -r "s/<\/configuration>//gi" hdfs-site.xml
cat <<EOF >>hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
<description>Default block replication.
The actual number of replications can be specified when the file is created.
The default is used if replication is not specified in create time.
</description>
</property>
<property>
<name>dfs.name.dir</name>
<value>/opt/hadoop/namenode</value>
</property>
<property>
<name>dfs.data.dir</name>
<value>/opt/hadoop/datanode</value>
</property>
<property>
<name>dfs.namenode.datanode.registration.ip-hostname-check</name>
<value>false</value>
</property>
</configuration>
EOF
# mapred-site.xml ships only as a template; instantiate it, then swap the
# empty <configuration> wrapper for one pointing the job tracker at this host.
cp mapred-site.xml.template mapred-site.xml
# Single sed invocation, one expression per tag being stripped.
sed -i -r -e "s/<configuration>//gi" -e "s/<\/configuration>//gi" mapred-site.xml
cat <<EOF >>mapred-site.xml
<configuration>
<property>
<name>mapred.job.tracker</name>
<value>localhost:54311</value>
<description>The host and port that the MapReduce job tracker runs
at. If "local", then jobs are run in-process as a single map
and reduce task.
</description>
</property>
</configuration>
EOF
# Initialize HDFS metadata, then start all daemons (HDFS + YARN).
hadoop namenode -format
start-all.sh
# Quick sanity checks: jps should list NameNode/DataNode/ResourceManager…,
# and the NameNode web UI answers on 50070 once HDFS is up.
jps
curl localhost:50070

# A fresh CentOS image usually lacks figlet; fall back to plain echo so
# the success message itself cannot fail the run.
if command -v figlet >/dev/null 2>&1; then
  figlet -f banner "HADOOP Installed :-)"
  figlet -f small "FINISHED Installing Hadoop ..."
else
  echo "HADOOP Installed :-)"
  echo "FINISHED Installing Hadoop ..."
fi
sleep 1