source: proiecte/HadoopJUnit/hadoop-0.20.1/src/contrib/ec2/bin/image/create-hadoop-image-remote @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 3.1 KB
Line 
1#!/bin/sh
2
3# Licensed to the Apache Software Foundation (ASF) under one or more
4# contributor license agreements.  See the NOTICE file distributed with
5# this work for additional information regarding copyright ownership.
6# The ASF licenses this file to You under the Apache License, Version 2.0
7# (the "License"); you may not use this file except in compliance with
8# the License.  You may obtain a copy of the License at
9#
10#     http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18
# Create a Hadoop AMI. Runs on the EC2 instance.

# Import variables (AWS credentials, versions, bucket names) from the
# environment script that was copied next to this script.
bin=$(dirname "$0")
# Use && so a failed cd cannot leave $bin pointing at the wrong directory.
bin=$(cd "$bin" && pwd)
. "$bin/hadoop-ec2-env.sh"

# Remove environment script since it contains sensitive information
rm -f "$bin/hadoop-ec2-env.sh"
28
# Install Java from the self-extracting binary at $JAVA_BINARY_URL
# (defined in hadoop-ec2-env.sh). Downloads into /usr/local, runs the
# installer, then deletes the downloaded artifact.
echo "Downloading and installing java binary."
# Abort rather than download/install into an unexpected directory.
cd /usr/local || exit 1
wget -nv -O java.bin "$JAVA_BINARY_URL"
sh java.bin
rm -f java.bin
35
# Install supporting tools: rsync/lynx/screen utilities, the Ganglia
# monitoring stack, and the Apache httpd + PHP frontend it needs.
echo "Installing rpms."
yum -y install \
  rsync lynx screen \
  ganglia-gmetad ganglia-gmond ganglia-web \
  httpd php
# Drop cached packages/metadata to keep the AMI small.
yum -y clean all
40
# Install Hadoop. Fetch the release tarball from the archive mirror,
# falling back to the primary Apache distribution site if the archive
# does not have it, then unpack into /usr/local.
echo "Installing Hadoop $HADOOP_VERSION."
# Abort rather than unpack into an unexpected directory.
cd /usr/local || exit 1
wget -nv "http://archive.apache.org/dist/hadoop/core/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz"
[ ! -f "hadoop-$HADOOP_VERSION.tar.gz" ] && \
  wget -nv "http://www.apache.org/dist/hadoop/core/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz"
tar xzf "hadoop-$HADOOP_VERSION.tar.gz"
rm -f "hadoop-$HADOOP_VERSION.tar.gz"
48
# Configure Hadoop by uncommenting and setting defaults in hadoop-env.sh:
# JAVA_HOME points at the JDK installed above, logs go to the instance
# store (/mnt), slaves stagger startup by 1s, and the JVM runs in
# -server mode.
sed -i -e "s|# export JAVA_HOME=.*|export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}|" \
       -e 's|# export HADOOP_LOG_DIR=.*|export HADOOP_LOG_DIR=/mnt/hadoop/logs|' \
       -e 's|# export HADOOP_SLAVE_SLEEP=.*|export HADOOP_SLAVE_SLEEP=1|' \
       -e 's|# export HADOOP_OPTS=.*|export HADOOP_OPTS=-server|' \
      "/usr/local/hadoop-$HADOOP_VERSION/conf/hadoop-env.sh"
55
# Run user data as script on instance startup: make the init script
# executable and append it to rc.local so it fires on every boot.
chmod +x /etc/init.d/ec2-run-user-data
printf '%s\n' "/etc/init.d/ec2-run-user-data" >> /etc/rc.d/rc.local
59
# Setup root user bash environment: export JAVA_HOME/HADOOP_HOME and put
# both bin directories on PATH. The PATH line is single-quoted on purpose
# so the variables expand at login time, not while building the image.
{
  echo "export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}"
  echo "export HADOOP_HOME=/usr/local/hadoop-${HADOOP_VERSION}"
  echo 'export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$PATH'
} >> /root/.bash_profile
64
# Configure networking.
# Delete SSH authorized_keys since it includes the key it was launched
# with. (Note that it is re-populated when an instance starts.)
rm -f /root/.ssh/authorized_keys
# Ensure logging in to new hosts is seamless.
printf '%s\n' '    StrictHostKeyChecking no' >> /etc/ssh/ssh_config
70
# Bundle and upload image
cd ~root || exit 1
# Don't need to delete .bash_history since it isn't written until exit.
df -h
# Bundle the root volume (3 GB) into an image under /mnt. The pk*/cert*
# globs are intentionally unquoted so the shell expands them to the key
# pair that was copied onto the instance.
ec2-bundle-vol -d /mnt -k /mnt/pk*.pem -c /mnt/cert*.pem -u "$AWS_ACCOUNT_ID" \
  -s 3072 -p "hadoop-$HADOOP_VERSION-$ARCH" -r "$ARCH"

# Upload the bundle to S3 using the credentials sourced earlier.
ec2-upload-bundle -b "$S3_BUCKET" -m "/mnt/hadoop-$HADOOP_VERSION-$ARCH.manifest.xml" \
  -a "$AWS_ACCESS_KEY_ID" -s "$AWS_SECRET_ACCESS_KEY"

# End
echo Done
Note: See TracBrowser for help on using the repository browser.