#
# Copyright 2005 The Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Note: This script depends on 7 environment variables to function correctly:
# a) CLASSPATH
# b) HADOOP_HOME
# c) HADOOP_CONF_DIR
# d) HADOOP_LOG_DIR
# e) LIBHDFS_BUILD_DIR
# f) LIBHDFS_INSTALL_DIR
# g) OS_NAME
# All these are passed by build.xml.
#

# Name of the libhdfs test binary produced by the build.
HDFS_TEST=hdfs_test
# Quote all expansions so installations whose paths contain spaces work.
HADOOP_LIB_DIR="$HADOOP_HOME/lib"
HADOOP_BIN_DIR="$HADOOP_HOME/bin"

# Manipulate HADOOP_CONF_DIR too,
# which is necessary to circumvent bin/hadoop.
HADOOP_CONF_DIR="$HADOOP_CONF_DIR:$HADOOP_HOME/conf"

# Set the pid file dir so daemon pid files are not written to /tmp.
export HADOOP_PID_DIR="$HADOOP_LOG_DIR"
# CLASSPATH initially contains $HADOOP_CONF_DIR, then tools.jar from the JDK.
CLASSPATH="${HADOOP_CONF_DIR}"
CLASSPATH="${CLASSPATH}:${JAVA_HOME}/lib/tools.jar"

# For developers, add Hadoop classes built in the source tree to CLASSPATH.
if [ -d "$HADOOP_HOME/build/classes" ]; then
  CLASSPATH="${CLASSPATH}:${HADOOP_HOME}/build/classes"
fi
# webapps are looked up relative to their parent directory, so add "build"
# itself rather than "build/webapps".
if [ -d "$HADOOP_HOME/build/webapps" ]; then
  CLASSPATH="${CLASSPATH}:${HADOOP_HOME}/build"
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  CLASSPATH="${CLASSPATH}:${HADOOP_HOME}/build/test/classes"
fi
# Add all Hadoop jars to CLASSPATH.  Quoting "$HADOOP_HOME" and "$f"
# keeps filenames with spaces intact, without the global IFS= hack the
# script previously relied on (which changed word-splitting for
# everything between the set and the unset).
for f in "$HADOOP_HOME"/lib/*.jar; do
  CLASSPATH="${CLASSPATH}:$f"
done

for f in "$HADOOP_HOME"/*.jar; do
  CLASSPATH="${CLASSPATH}:$f"
done

for f in "$HADOOP_HOME"/lib/jsp-2.0/*.jar; do
  CLASSPATH="${CLASSPATH}:$f"
done

# Ivy-managed common dependencies (present in developer builds).
if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
  for f in "$HADOOP_HOME"/build/ivy/lib/Hadoop/common/*.jar; do
    CLASSPATH="${CLASSPATH}:$f"
  done
fi
# Locate the directory (or directories) containing the JVM shared library
# under $JAVA_HOME.  Prunes any "client" directory so the server VM is
# preferred, and falls back to the kaffe VM if no libjvm is found.
# Reads:   JAVA_HOME, OS_NAME (globals)
# Outputs: space-separated directory list on stdout
findlibjvm () {
    javabasedir=$JAVA_HOME
    case $OS_NAME in
    cygwin* | mingw* | pw23* )
        # Windows-ish platforms ship jvm.dll instead of libjvm.*
        lib_jvm_dir=$(find "$javabasedir" -follow \( \
            \( -name client -type d -prune \) -o \
            \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
        ;;
    aix*)
        # NOTE(review): no -follow on AIX, unlike the other branches —
        # presumably deliberate for AIX find; confirm before unifying.
        lib_jvm_dir=$(find "$javabasedir" \( \
            \( -name client -type d -prune \) -o \
            \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
        if test -z "$lib_jvm_dir"; then
            lib_jvm_dir=$(find "$javabasedir" \( \
                \( -name client -type d -prune \) -o \
                \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
        fi
        ;;
    *)
        lib_jvm_dir=$(find "$javabasedir" -follow \( \
            \( -name client -type d -prune \) -o \
            \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
        if test -z "$lib_jvm_dir"; then
            lib_jvm_dir=$(find "$javabasedir" -follow \( \
                \( -name client -type d -prune \) -o \
                \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
        fi
        ;;
    esac
    # Intentionally unquoted: word-splitting trims the trailing space
    # left behind by tr.
    echo $lib_jvm_dir
}
LIB_JVM_DIR=$(findlibjvm)
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
echo LIB_JVM_DIR = $LIB_JVM_DIR
echo "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
# Put delays to ensure hdfs is up and running and also shuts down
# after the tests are complete.
# Bail out if the Hadoop tree is missing rather than formatting/starting
# daemons from the wrong directory.
cd "$HADOOP_HOME" || exit 1
echo Y | "$HADOOP_BIN_DIR/hadoop" namenode -format &&
"$HADOOP_BIN_DIR/hadoop-daemon.sh" start namenode && sleep 2 &&
"$HADOOP_BIN_DIR/hadoop-daemon.sh" start datanode && sleep 2 &&
sleep 20
# Run the libhdfs test binary, preloading the JVM and libhdfs shared
# libraries.  A single variable guarantees the echoed command is exactly
# the one executed (previously the two lines disagreed on the preload
# order and the executed one carried a stray trailing ':').
TEST_LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so"
echo CLASSPATH="$HADOOP_CONF_DIR:$CLASSPATH" LD_PRELOAD="$TEST_LD_PRELOAD" "$LIBHDFS_BUILD_DIR/$HDFS_TEST" &&
CLASSPATH="$HADOOP_CONF_DIR:$CLASSPATH" LD_PRELOAD="$TEST_LD_PRELOAD" "$LIBHDFS_BUILD_DIR/$HDFS_TEST"
BUILD_STATUS=$?
sleep 3
"$HADOOP_BIN_DIR/hadoop-daemon.sh" stop datanode && sleep 2 &&
"$HADOOP_BIN_DIR/hadoop-daemon.sh" stop namenode && sleep 2

# Propagate the test binary's exit status to the build.
echo exiting with $BUILD_STATUS
exit $BUILD_STATUS