source: proiecte/HadoopJUnit/hadoop-0.20.1/bin/start-dfs.sh @ 142

Last change on this file since 142 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 1.6 KB
Line 
1#!/usr/bin/env bash
2
3# Licensed to the Apache Software Foundation (ASF) under one or more
4# contributor license agreements.  See the NOTICE file distributed with
5# this work for additional information regarding copyright ownership.
6# The ASF licenses this file to You under the Apache License, Version 2.0
7# (the "License"); you may not use this file except in compliance with
8# the License.  You may obtain a copy of the License at
9#
10#     http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18
19# Start hadoop dfs daemons.
20# Optionally upgrade or rollback dfs state.
21# Run this on master node.
22
# Usage string printed when an unrecognized option is supplied.
usage="Usage: start-dfs.sh [-upgrade|-rollback]"

# Resolve the absolute directory containing this script so sibling
# scripts (hadoop-daemon.sh, hadoop-daemons.sh) can be located even
# when invoked via a relative path.  $(…) replaces deprecated backticks.
bin=$(dirname "$0")
bin=$(cd "$bin"; pwd)

# Load the shared Hadoop shell configuration (sets HADOOP_CONF_DIR etc.).
. "$bin"/hadoop-config.sh
29
# Parse the optional start mode.
# -upgrade is forwarded only to the namenode; -rollback is forwarded to
# both the namenode and the datanodes.  All expansions are quoted so
# values containing spaces or glob characters cannot word-split (SC2086).
if [ $# -ge 1 ]; then
  nameStartOpt="$1"
  shift
  case "$nameStartOpt" in
    (-upgrade)
      ;;
    (-rollback)
      dataStartOpt="$nameStartOpt"
      ;;
    (*)
      echo "$usage"
      exit 1
      ;;
  esac
fi
46
# Start the dfs daemons.  The namenode is started first; datanodes will
# log connection errors until it is up, then connect and register.
# $HADOOP_CONF_DIR is quoted to survive paths containing spaces.
# NOTE(review): $nameStartOpt / $dataStartOpt are deliberately left
# unquoted — when empty they must expand to no argument at all rather
# than an empty string argument.
"$bin"/hadoop-daemon.sh --config "$HADOOP_CONF_DIR" start namenode $nameStartOpt
"$bin"/hadoop-daemons.sh --config "$HADOOP_CONF_DIR" start datanode $dataStartOpt
"$bin"/hadoop-daemons.sh --config "$HADOOP_CONF_DIR" --hosts masters start secondarynamenode
Note: See TracBrowser for help on using the repository browser.