source: proiecte/HadoopJUnit/hadoop-0.20.1/src/c++/libhdfs/tests/test-libhdfs.sh @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 4.2 KB
Line 
1#
2# Copyright 2005 The Apache Software Foundation
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8#     http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15#
16
17#
# Note: This script depends on 7 environment variables to function correctly:
19# a) CLASSPATH
20# b) HADOOP_HOME
21# c) HADOOP_CONF_DIR
22# d) HADOOP_LOG_DIR
23# e) LIBHDFS_BUILD_DIR
24# f) LIBHDFS_INSTALL_DIR
25# g) OS_NAME
26# All these are passed by build.xml.
27#
28
HDFS_TEST=hdfs_test
HADOOP_LIB_DIR=$HADOOP_HOME/lib
HADOOP_BIN_DIR=$HADOOP_HOME/bin

# Manipulate HADOOP_CONF_DIR too
# which is necessary to circumvent bin/hadoop
HADOOP_CONF_DIR=$HADOOP_CONF_DIR:$HADOOP_HOME/conf

# set pid file dir so they are not written to /tmp
export HADOOP_PID_DIR=$HADOOP_LOG_DIR

# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar

# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_HOME/build/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
fi
if [ -d "$HADOOP_HOME/build/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
fi

# Empty IFS disables word splitting, so jar paths containing spaces
# survive the loops below intact.
IFS=

# add libs to CLASSPATH; the [ -e ] guard skips the literal glob pattern
# the shell leaves behind when a directory contains no jars
for f in "$HADOOP_HOME"/lib/*.jar; do
  [ -e "$f" ] || continue
  CLASSPATH=${CLASSPATH}:$f
done

for f in "$HADOOP_HOME"/*.jar; do
  [ -e "$f" ] || continue
  CLASSPATH=${CLASSPATH}:$f
done

for f in "$HADOOP_HOME"/lib/jsp-2.0/*.jar; do
  [ -e "$f" ] || continue
  CLASSPATH=${CLASSPATH}:$f
done

if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
  for f in "$HADOOP_HOME"/build/ivy/lib/Hadoop/common/*.jar; do
    [ -e "$f" ] || continue
    CLASSPATH=${CLASSPATH}:$f
  done
fi

# restore ordinary word-splitting behaviour
unset IFS
78
# findlibjvm: locate the directory holding the JVM shared library
# (jvm.dll / libjvm.* / libkaffevm.*) under $JAVA_HOME, pruning "client"
# JVM directories.  Reads $JAVA_HOME and $OS_NAME (passed by build.xml)
# and prints the directory (empty string if none found) on stdout.
findlibjvm () {
  javabasedir=$JAVA_HOME
  case $OS_NAME in
    cygwin* | mingw* | pw23* )
      # Windows-like platforms ship the JVM as jvm.dll
      lib_jvm_dir=$(find "$javabasedir" -follow \( \
          \( -name client -type d -prune \) -o \
          \( -name "jvm.dll" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
      ;;
    aix*)
      # NOTE(review): the original omits -follow on AIX only — kept as-is
      lib_jvm_dir=$(find "$javabasedir" \( \
          \( -name client -type d -prune \) -o \
          \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
      if test -z "$lib_jvm_dir"; then
        # fall back to the Kaffe JVM
        lib_jvm_dir=$(find "$javabasedir" \( \
            \( -name client -type d -prune \) -o \
            \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
      fi
      ;;
    *)
      lib_jvm_dir=$(find "$javabasedir" -follow \( \
          \( -name client -type d -prune \) -o \
          \( -name "libjvm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
      if test -z "$lib_jvm_dir"; then
        # fall back to the Kaffe JVM
        lib_jvm_dir=$(find "$javabasedir" -follow \( \
            \( -name client -type d -prune \) -o \
            \( -name "libkaffevm.*" -exec dirname {} \; \) \) 2> /dev/null | tr "\n" " ")
      fi
      ;;
  esac
  # tr leaves a trailing blank; the original relied on an unquoted echo to
  # collapse it.  Trim it explicitly so the value stays safe when quoted.
  lib_jvm_dir=${lib_jvm_dir% }
  echo "$lib_jvm_dir"
}
LIB_JVM_DIR=$(findlibjvm)
echo  "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
echo  LIB_JVM_DIR = $LIB_JVM_DIR
echo  "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
# Put delays to ensure hdfs is up and running and also shuts down
# after the tests are complete.
# Bail out if HADOOP_HOME is wrong — otherwise we would format a namenode
# and start daemons relative to whatever directory we happen to be in.
cd "$HADOOP_HOME" || exit 1
echo Y | "$HADOOP_BIN_DIR/hadoop" namenode -format &&
"$HADOOP_BIN_DIR/hadoop-daemon.sh" start namenode && sleep 2 &&
"$HADOOP_BIN_DIR/hadoop-daemon.sh" start datanode && sleep 2 &&
sleep 20
# Log the exact command line (preload order matches the real invocation
# below: libjvm first, then libhdfs), then run the libhdfs test binary
# and remember its exit status so it becomes the script's exit status.
echo CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" $LIBHDFS_BUILD_DIR/$HDFS_TEST &&
CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH LD_PRELOAD="$LIB_JVM_DIR/libjvm.so:$LIBHDFS_INSTALL_DIR/libhdfs.so:" "$LIBHDFS_BUILD_DIR/$HDFS_TEST"
BUILD_STATUS=$?
sleep 3
"$HADOOP_BIN_DIR/hadoop-daemon.sh" stop datanode && sleep 2 &&
"$HADOOP_BIN_DIR/hadoop-daemon.sh" stop namenode && sleep 2

echo exiting with $BUILD_STATUS
exit $BUILD_STATUS
Note: See TracBrowser for help on using the repository browser.