# BIGTOP-1129. Cannot stop datanode through init script
# [bigtop.git] / bigtop-packages / src / common / hadoop / install_hadoop.sh
#!/bin/bash -x
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

17 set -ex
18
# Print the installer's help text and abort with status 1.
usage() {
  cat <<HELP_TEXT

usage: $0 <options>
  Required not-so-options:
     --distro-dir=DIR   path to distro specific files (debian/RPM)
     --build-dir=DIR    path to hive/build/dist
     --prefix=PREFIX    path to install into

  Optional options:
     --native-build-string eg Linux-amd-64 (optional - no native installed if not set)
     ... [ see source for more similar options ]

HELP_TEXT
  exit 1
}
33
# ---------------------------------------------------------------------------
# Command-line parsing.
#
# NB: under 'set -e' a failing command substitution in an assignment aborts
# the script immediately, so the old pattern
#     OPTS=$(getopt ...) ; if [ $? != 0 ]; then usage; fi
# could never reach its error check.  Attach the fallback directly to the
# assignment instead, so a bad option still produces the usage message.
# ---------------------------------------------------------------------------
OPTS=$(getopt \
  -n "$0" \
  -o '' \
  -l 'prefix:' \
  -l 'distro-dir:' \
  -l 'build-dir:' \
  -l 'native-build-string:' \
  -l 'installed-lib-dir:' \
  -l 'hadoop-dir:' \
  -l 'httpfs-dir:' \
  -l 'hdfs-dir:' \
  -l 'yarn-dir:' \
  -l 'mapreduce-dir:' \
  -l 'client-dir:' \
  -l 'system-include-dir:' \
  -l 'system-lib-dir:' \
  -l 'system-libexec-dir:' \
  -l 'hadoop-etc-dir:' \
  -l 'httpfs-etc-dir:' \
  -l 'doc-dir:' \
  -l 'man-dir:' \
  -l 'example-dir:' \
  -l 'apache-branch:' \
  -- "$@") || usage

# Walk the normalized option list produced by getopt.  Every long option
# takes a value, hence the uniform 'shift 2'.
eval set -- "$OPTS"
while true ; do
  case "$1" in
    --prefix)
      PREFIX=$2 ; shift 2
      ;;
    --distro-dir)
      DISTRO_DIR=$2 ; shift 2
      ;;
    --httpfs-dir)
      HTTPFS_DIR=$2 ; shift 2
      ;;
    --hadoop-dir)
      HADOOP_DIR=$2 ; shift 2
      ;;
    --hdfs-dir)
      HDFS_DIR=$2 ; shift 2
      ;;
    --yarn-dir)
      YARN_DIR=$2 ; shift 2
      ;;
    --mapreduce-dir)
      MAPREDUCE_DIR=$2 ; shift 2
      ;;
    --client-dir)
      CLIENT_DIR=$2 ; shift 2
      ;;
    --system-include-dir)
      SYSTEM_INCLUDE_DIR=$2 ; shift 2
      ;;
    --system-lib-dir)
      SYSTEM_LIB_DIR=$2 ; shift 2
      ;;
    --system-libexec-dir)
      SYSTEM_LIBEXEC_DIR=$2 ; shift 2
      ;;
    --build-dir)
      BUILD_DIR=$2 ; shift 2
      ;;
    --native-build-string)
      NATIVE_BUILD_STRING=$2 ; shift 2
      ;;
    --doc-dir)
      DOC_DIR=$2 ; shift 2
      ;;
    --hadoop-etc-dir)
      HADOOP_ETC_DIR=$2 ; shift 2
      ;;
    --httpfs-etc-dir)
      HTTPFS_ETC_DIR=$2 ; shift 2
      ;;
    --installed-lib-dir)
      INSTALLED_LIB_DIR=$2 ; shift 2
      ;;
    --man-dir)
      MAN_DIR=$2 ; shift 2
      ;;
    --example-dir)
      EXAMPLE_DIR=$2 ; shift 2
      ;;
    --apache-branch)
      # Was declared to getopt but had no arm, so a documented option made
      # the installer die with "Unknown option".  Accept and record it.
      APACHE_BRANCH=$2 ; shift 2
      ;;
    --)
      shift ; break
      ;;
    *)
      # Diagnostics belong on stderr; keep the explicit exit as a guard in
      # case usage ever stops exiting itself.
      echo "Unknown option: $1" >&2
      usage
      exit 1
      ;;
  esac
done
133
# Verify that all required parameters were supplied.  Bash indirect
# expansion (${!var}) replaces the old $(eval "echo \$$var") round-trip,
# which word-split and glob-expanded the value before testing it.
for var in PREFIX BUILD_DIR; do
  if [ -z "${!var}" ]; then
    echo "Missing param: $var" >&2
    usage
  fi
done
140
# ---------------------------------------------------------------------------
# Fill in defaults for every path not supplied on the command line.
# ': "${VAR:=default}"' assigns only when VAR is unset or empty, exactly
# like the VAR=${VAR:-default} spelling it replaces.  All package paths
# are rooted at $PREFIX (the staging area), except SYSTEM_LIB_DIR and
# INSTALLED_HADOOP_DIR, which name final on-host locations.
# ---------------------------------------------------------------------------
: "${HADOOP_DIR:=$PREFIX/usr/lib/hadoop}"
: "${HDFS_DIR:=$PREFIX/usr/lib/hadoop-hdfs}"
: "${YARN_DIR:=$PREFIX/usr/lib/hadoop-yarn}"
: "${MAPREDUCE_DIR:=$PREFIX/usr/lib/hadoop-mapreduce}"
: "${CLIENT_DIR:=$PREFIX/usr/lib/hadoop/client}"
: "${HTTPFS_DIR:=$PREFIX/usr/lib/hadoop-httpfs}"
: "${SYSTEM_LIB_DIR:=/usr/lib}"
: "${BIN_DIR:=$PREFIX/usr/bin}"
: "${DOC_DIR:=$PREFIX/usr/share/doc/hadoop}"
: "${MAN_DIR:=$PREFIX/usr/man}"
: "${SYSTEM_INCLUDE_DIR:=$PREFIX/usr/include}"
: "${SYSTEM_LIBEXEC_DIR:=$PREFIX/usr/libexec}"
: "${EXAMPLE_DIR:=$DOC_DIR/examples}"
: "${HADOOP_ETC_DIR:=$PREFIX/etc/hadoop}"
: "${HTTPFS_ETC_DIR:=$PREFIX/etc/hadoop-httpfs}"
: "${BASH_COMPLETION_DIR:=$PREFIX/etc/bash_completion.d}"

: "${INSTALLED_HADOOP_DIR:=/usr/lib/hadoop}"
# Native libraries always live under the (possibly overridden) HADOOP_DIR.
HADOOP_NATIVE_LIB_DIR=${HADOOP_DIR}/lib/native
160
##Needed for some distros to find ldconfig
export PATH="/sbin/:$PATH"

# Make bin wrappers
# Each user-facing command (hadoop, hdfs, yarn, mapred) gets a thin wrapper
# in $BIN_DIR that detects JAVA_HOME, points HADOOP_LIBEXEC_DIR at the
# installed libexec directory and execs the real script from its package.
# ${component#*/bin/} strips everything through the first "/bin/" to
# recover the bare command name; ${component#${PREFIX}} strips the staging
# prefix so the wrapper refers to the final installed path.  '$' that must
# survive into the generated script is escaped as '\$'.
mkdir -p $BIN_DIR

for component in $HADOOP_DIR/bin/hadoop $HDFS_DIR/bin/hdfs $YARN_DIR/bin/yarn $MAPREDUCE_DIR/bin/mapred ; do
  wrapper=$BIN_DIR/${component#*/bin/}
  cat > $wrapper <<EOF
#!/bin/bash

# Autodetect JAVA_HOME if not defined
. /usr/lib/bigtop-utils/bigtop-detect-javahome

export HADOOP_LIBEXEC_DIR=/${SYSTEM_LIBEXEC_DIR#${PREFIX}}

exec ${component#${PREFIX}} "\$@"
EOF
  chmod 755 $wrapper
done
181
#libexec
# Shared launcher scripts plus Bigtop's layout and HDFS bootstrap helpers.
install -d -m 0755 ${SYSTEM_LIBEXEC_DIR}
cp ${BUILD_DIR}/libexec/* ${SYSTEM_LIBEXEC_DIR}/
cp ${DISTRO_DIR}/hadoop-layout.sh ${SYSTEM_LIBEXEC_DIR}/
install -m 0755 ${DISTRO_DIR}/init-hdfs.sh ${SYSTEM_LIBEXEC_DIR}/

# hadoop jar
# Top-level jars for each component package; hadoop-auth and
# hadoop-annotations are promoted out of lib/ so they sit next to the
# common jars.
install -d -m 0755 ${HADOOP_DIR}
cp ${BUILD_DIR}/share/hadoop/common/*.jar ${HADOOP_DIR}/
cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-auth*.jar ${HADOOP_DIR}/
cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/hadoop-annotations*.jar ${HADOOP_DIR}/
install -d -m 0755 ${MAPREDUCE_DIR}
cp ${BUILD_DIR}/share/hadoop/mapreduce/hadoop-mapreduce*.jar ${MAPREDUCE_DIR}
cp ${BUILD_DIR}/share/hadoop/tools/lib/*.jar ${MAPREDUCE_DIR}
install -d -m 0755 ${HDFS_DIR}
cp ${BUILD_DIR}/share/hadoop/hdfs/*.jar ${HDFS_DIR}/
install -d -m 0755 ${YARN_DIR}
cp ${BUILD_DIR}/share/hadoop/yarn/hadoop-yarn*.jar ${YARN_DIR}/
chmod 644 ${HADOOP_DIR}/*.jar ${MAPREDUCE_DIR}/*.jar ${HDFS_DIR}/*.jar ${YARN_DIR}/*.jar

# lib jars
# Third-party dependency jars stay under each component's lib/ directory.
install -d -m 0755 ${HADOOP_DIR}/lib
cp ${BUILD_DIR}/share/hadoop/common/lib/*.jar ${HADOOP_DIR}/lib
install -d -m 0755 ${MAPREDUCE_DIR}/lib
cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/*.jar ${MAPREDUCE_DIR}/lib
install -d -m 0755 ${HDFS_DIR}/lib
cp ${BUILD_DIR}/share/hadoop/hdfs/lib/*.jar ${HDFS_DIR}/lib
install -d -m 0755 ${YARN_DIR}/lib
cp ${BUILD_DIR}/share/hadoop/yarn/lib/*.jar ${YARN_DIR}/lib
chmod 644 ${HADOOP_DIR}/lib/*.jar ${MAPREDUCE_DIR}/lib/*.jar ${HDFS_DIR}/lib/*.jar ${YARN_DIR}/lib/*.jar

# Install webapps
cp -ra ${BUILD_DIR}/share/hadoop/hdfs/webapps ${HDFS_DIR}/

# bin
# Real executables; the thin wrappers created above exec these.
install -d -m 0755 ${HADOOP_DIR}/bin
cp -a ${BUILD_DIR}/bin/{hadoop,rcc,fuse_dfs} ${HADOOP_DIR}/bin
install -d -m 0755 ${HDFS_DIR}/bin
cp -a ${BUILD_DIR}/bin/hdfs ${HDFS_DIR}/bin
install -d -m 0755 ${YARN_DIR}/bin
cp -a ${BUILD_DIR}/bin/{yarn,container-executor} ${YARN_DIR}/bin
install -d -m 0755 ${MAPREDUCE_DIR}/bin
cp -a ${BUILD_DIR}/bin/mapred ${MAPREDUCE_DIR}/bin
# FIXME: MAPREDUCE-3980
# mapred is duplicated into the yarn package until that JIRA is resolved.
cp -a ${BUILD_DIR}/bin/mapred ${YARN_DIR}/bin

# sbin
install -d -m 0755 ${HADOOP_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,slaves}.sh ${HADOOP_DIR}/sbin
install -d -m 0755 ${HDFS_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh ${HDFS_DIR}/sbin
install -d -m 0755 ${YARN_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/{yarn-daemon,yarn-daemons}.sh ${YARN_DIR}/sbin
install -d -m 0755 ${MAPREDUCE_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/mr-jobhistory-daemon.sh ${MAPREDUCE_DIR}/sbin

# native libs
# libhdfs goes into the system lib dir.  'ldconfig -l' processes just the
# named file (creating its SONAME link) and -N skips the cache rebuild;
# the extra 'ln -s' adds the unversioned .so development name
# (${library/.so.*/} strips the version suffix).
install -d -m 0755 ${SYSTEM_LIB_DIR}
install -d -m 0755 ${HADOOP_NATIVE_LIB_DIR}
for library in libhdfs.so.0.0.0; do
  cp ${BUILD_DIR}/lib/native/${library} ${SYSTEM_LIB_DIR}/
  ldconfig -vlN ${SYSTEM_LIB_DIR}/${library}
  ln -s ${library} ${SYSTEM_LIB_DIR}/${library/.so.*/}.so
done

install -d -m 0755 ${SYSTEM_INCLUDE_DIR}
cp ${BUILD_DIR}/include/hdfs.h ${SYSTEM_INCLUDE_DIR}/

# Static archives plus libhadoop (and libsnappy when it was built — the ls
# is silenced so a missing snappy is simply skipped) go into the
# Hadoop-private native dir.
cp ${BUILD_DIR}/lib/native/*.a ${HADOOP_NATIVE_LIB_DIR}/
for library in `cd ${BUILD_DIR}/lib/native ; ls libsnappy.so.1.* 2>/dev/null` libhadoop.so.1.0.0; do
  cp ${BUILD_DIR}/lib/native/${library} ${HADOOP_NATIVE_LIB_DIR}/
  ldconfig -vlN ${HADOOP_NATIVE_LIB_DIR}/${library}
  ln -s ${library} ${HADOOP_NATIVE_LIB_DIR}/${library/.so.*/}.so
done
256
# Install fuse wrapper
# Generates ${BIN_DIR}/hadoop-fuse-dfs: a launcher that loads the fuse
# kernel module, locates Java, builds a classpath from the hadoop client
# package and runs the native fuse_dfs binary.  '$' that must be evaluated
# when the *wrapper* runs (not now) is escaped as '\$'.
fuse_wrapper=${BIN_DIR}/hadoop-fuse-dfs
cat > $fuse_wrapper << EOF
#!/bin/bash

/sbin/modprobe fuse

# Autodetect JAVA_HOME if not defined
. /usr/lib/bigtop-utils/bigtop-detect-javahome

export HADOOP_HOME=\${HADOOP_HOME:-${HADOOP_DIR#${PREFIX}}}

BIGTOP_DEFAULTS_DIR=\${BIGTOP_DEFAULTS_DIR-/etc/default}
[ -n "\${BIGTOP_DEFAULTS_DIR}" -a -r "\${BIGTOP_DEFAULTS_DIR}/hadoop-fuse" ] && . "\${BIGTOP_DEFAULTS_DIR}/hadoop-fuse"

export HADOOP_LIBEXEC_DIR=${SYSTEM_LIBEXEC_DIR#${PREFIX}}

if [ "\${LD_LIBRARY_PATH}" = "" ]; then
  export JAVA_NATIVE_LIBS="libjvm.so"
  . /usr/lib/bigtop-utils/bigtop-detect-javalibs
  export LD_LIBRARY_PATH=\${JAVA_NATIVE_PATH}:/usr/lib
fi

# Pulls all jars from hadoop client package
for jar in \${HADOOP_HOME}/client/*.jar; do
  CLASSPATH+="\$jar:"
done
CLASSPATH="/etc/hadoop/conf:\${CLASSPATH}"

# "\$@" is quoted so mount arguments containing spaces are passed through
# intact (the previous unquoted \$@ word-split them).
env CLASSPATH="\${CLASSPATH}" \${HADOOP_HOME}/bin/fuse_dfs "\$@"
EOF

chmod 755 $fuse_wrapper
290
291 # Bash tab completion
292 install -d -m 0755 $BASH_COMPLETION_DIR
293 install -m 0644 \
294 hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh \
295 $BASH_COMPLETION_DIR/hadoop
296
297 # conf
298 install -d -m 0755 $HADOOP_ETC_DIR/conf.empty
299 cp ${DISTRO_DIR}/conf.empty/mapred-site.xml $HADOOP_ETC_DIR/conf.empty
300 # disable everything that's definied in hadoop-env.sh
301 # so that it can still be used as example, but doesn't affect anything
302 # by default
303 sed -i -e '/^[^#]/s,^,#,' ${BUILD_DIR}/etc/hadoop/hadoop-env.sh
304 cp ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/conf.empty
305
306 # docs
307 install -d -m 0755 ${DOC_DIR}
308 cp -r ${BUILD_DIR}/share/doc/* ${DOC_DIR}/
309
310 # man pages
311 mkdir -p $MAN_DIR/man1
312 gzip -c < $DISTRO_DIR/hadoop.1 > $MAN_DIR/man1/hadoop.1.gz
313 chmod 644 $MAN_DIR/man1/hadoop.1.gz
314
315 # HTTPFS
316 install -d -m 0755 ${HTTPFS_DIR}/sbin
317 cp ${BUILD_DIR}/sbin/httpfs.sh ${HTTPFS_DIR}/sbin/
318 cp -r ${BUILD_DIR}/share/hadoop/httpfs/tomcat/webapps ${HTTPFS_DIR}/webapps
319 install -d -m 0755 ${PREFIX}/var/lib/hadoop-httpfs
320 install -d -m 0755 $HTTPFS_ETC_DIR/conf.empty
321 install -d -m 0755 $HTTPFS_ETC_DIR/tomcat-deployment.dist
322 cp -r ${BUILD_DIR}/share/hadoop/httpfs/tomcat/conf $HTTPFS_ETC_DIR/tomcat-deployment.dist/
323 chmod 644 $HTTPFS_ETC_DIR/tomcat-deployment.dist/conf/*
324 mv $HADOOP_ETC_DIR/conf.empty/httpfs* $HTTPFS_ETC_DIR/conf.empty
325 sed -i -e '/<\/configuration>/i\
326 <!-- HUE proxy user setting -->\
327 <property>\
328 <name>httpfs.proxyuser.hue.hosts</name>\
329 <value>*</value>\
330 </property>\
331 <property>\
332 <name>httpfs.proxyuser.hue.groups</name>\
333 <value>*</value>\
334 </property>\
335 \
336 <property>\
337 <name>httpfs.hadoop.config.dir</name>\
338 <value>/etc/hadoop/conf</value>\
339 </property>' $HTTPFS_ETC_DIR/conf.empty/httpfs-site.xml
340
# Make the pseudo-distributed config
for conf in conf.pseudo ; do
  install -d -m 0755 $HADOOP_ETC_DIR/$conf
  # Install the upstream config files
  cp ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/$conf
  # Remove the ones that shouldn't be installed
  rm -rf $HADOOP_ETC_DIR/$conf/httpfs*
  # Overlay the -site files (tar pipeline preserves the overlay's layout)
  (cd $DISTRO_DIR/$conf && tar -cf - .) | (cd $HADOOP_ETC_DIR/$conf && tar -xf -)
  # NOTE(review): -R also applies 0644 to any sub-directories, making them
  # untraversable — presumably the overlay is flat; confirm.
  chmod -R 0644 $HADOOP_ETC_DIR/$conf/*
  # When building straight out of svn we have to account for pesky .svn subdirs
  rm -rf `find $HADOOP_ETC_DIR/$conf -name .svn -type d`
done
cp ${BUILD_DIR}/etc/hadoop/log4j.properties $HADOOP_ETC_DIR/conf.pseudo
355
356 # FIXME: Provide a convenience link for configuration (HADOOP-7939)
357 install -d -m 0755 ${HADOOP_DIR}/etc
358 ln -s ${HADOOP_ETC_DIR##${PREFIX}}/conf ${HADOOP_DIR}/etc/hadoop
359 install -d -m 0755 ${YARN_DIR}/etc
360 ln -s ${HADOOP_ETC_DIR##${PREFIX}}/conf ${YARN_DIR}/etc/hadoop
361
362 # Create log, var and lib
363 install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-hdfs
364 install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-yarn
365 install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-mapreduce
366
# Remove all source and create version-less symlinks to offer integration point with other projects
# NOTE(review): HADOOP_VERSION is never set in this script, so the regex
# below relies on it being exported by the caller — confirm in the
# packaging harness.
for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ${HTTPFS_DIR} ; do
  (cd $DIR &&
   rm -fv *-sources.jar
   rm -fv lib/hadoop-*.jar
   # hadoop-foo-<version>.jar -> hadoop-foo.jar
   for j in hadoop-*.jar; do
     if [[ $j =~ hadoop-(.*)-${HADOOP_VERSION}.jar ]]; then
       name=${BASH_REMATCH[1]}
       ln -s $j hadoop-$name.jar
     fi
   done)
done
379
# Now create a client installation area full of symlinks
# For every jar named in hadoop-client.list, look through the four
# component trees (and their lib/ subdirs) and create two links in
# ${CLIENT_DIR}: one keeping the versioned name and one with the version
# stripped.  'continue 2' jumps to the next jar after the first hit; a jar
# found nowhere aborts the install (exit 1).
install -d -m 0755 ${CLIENT_DIR}
for file in `cat ${BUILD_DIR}/hadoop-client.list` ; do
  for dir in ${HADOOP_DIR}/{lib,} ${HDFS_DIR}/{lib,} ${YARN_DIR}/{lib,} ${MAPREDUCE_DIR}/{lib,} ; do
    [ -e $dir/$file ] && \
    ln -fs ${dir#$PREFIX}/$file ${CLIENT_DIR}/${file} && \
    ln -fs ${dir#$PREFIX}/$file ${CLIENT_DIR}/${file/-[[:digit:]]*/.jar} && \
    continue 2
  done
  exit 1
done