#!/bin/sh

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# Create a Hadoop AMI. Runs on the EC2 instance.

# Import variables from the environment script shipped alongside this one.
# $(…) replaces legacy backticks; `cd … && pwd` so a failed cd cannot
# silently yield the caller's working directory as $bin.
bin=$(dirname "$0")
bin=$(cd "$bin" && pwd)
. "$bin"/hadoop-ec2-env.sh

# Remove environment script since it contains sensitive information
# (it must not be baked into the AMI image).
rm -f "$bin"/hadoop-ec2-env.sh

# Install Java: fetch the self-extracting JDK binary named by
# $JAVA_BINARY_URL (set in hadoop-ec2-env.sh) and run it in /usr/local.
echo "Downloading and installing java binary."
cd /usr/local || exit 1
# Quote the URL so an unset or space-containing value cannot word-split.
wget -nv -O java.bin "$JAVA_BINARY_URL"
sh java.bin
rm -f java.bin

# Install supporting tools: sync/terminal utilities plus the Ganglia
# monitoring stack and the web server it is browsed through.
echo "Installing rpms."
yum -y install rsync lynx screen \
  ganglia-gmetad ganglia-gmond ganglia-web \
  httpd php
# Drop the yum caches so they do not bloat the AMI.
yum -y clean all

# Install Hadoop: download the release tarball for $HADOOP_VERSION and
# unpack it under /usr/local. All expansions are quoted so an unset or
# unusual version string cannot word-split the URLs or file names.
echo "Installing Hadoop $HADOOP_VERSION."
cd /usr/local || exit 1
wget -nv "http://archive.apache.org/dist/hadoop/core/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz"
# Fall back to the primary distribution site if the archive mirror failed.
[ -f "hadoop-$HADOOP_VERSION.tar.gz" ] || \
  wget -nv "http://www.apache.org/dist/hadoop/core/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz"
tar xzf "hadoop-$HADOOP_VERSION.tar.gz"
rm -f "hadoop-$HADOOP_VERSION.tar.gz"

# Configure Hadoop by uncommenting/overriding defaults in hadoop-env.sh:
# point JAVA_HOME at the JDK installed above, keep logs on the large /mnt
# volume, stagger slave daemon start-up by 1s, and run the JVM in -server
# mode. The target path is quoted so it cannot word-split.
sed -i -e "s|# export JAVA_HOME=.*|export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}|" \
       -e 's|# export HADOOP_LOG_DIR=.*|export HADOOP_LOG_DIR=/mnt/hadoop/logs|' \
       -e 's|# export HADOOP_SLAVE_SLEEP=.*|export HADOOP_SLAVE_SLEEP=1|' \
       -e 's|# export HADOOP_OPTS=.*|export HADOOP_OPTS=-server|' \
  "/usr/local/hadoop-$HADOOP_VERSION/conf/hadoop-env.sh"

# Arrange for instance user data to be run as a script on every startup.
chmod +x /etc/init.d/ec2-run-user-data
printf '%s\n' "/etc/init.d/ec2-run-user-data" >> /etc/rc.d/rc.local

# Setup root user bash environment: export the Java/Hadoop locations and
# put their bin directories on PATH. The PATH line is single-quoted on
# purpose so the variables expand at login time, not now.
{
  echo "export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}"
  echo "export HADOOP_HOME=/usr/local/hadoop-${HADOOP_VERSION}"
  echo 'export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$PATH'
} >> /root/.bash_profile

# Configure networking.
# Delete SSH authorized_keys since it includes the key the instance was
# launched with. (It is re-populated when an instance starts.)
rm -f /root/.ssh/authorized_keys
# Ensure logging in to new hosts is seamless.
printf '%s\n' ' StrictHostKeyChecking no' >> /etc/ssh/ssh_config

# Bundle the root volume into an AMI image under /mnt and upload it to S3.
cd ~root || exit 1
# Don't need to delete .bash_history since it isn't written until exit.
df -h
# The pk*/cert* globs are intentional: the exact key-pair file names vary
# per account. Account/arch/bucket variables are quoted against splitting.
ec2-bundle-vol -d /mnt -k /mnt/pk*.pem -c /mnt/cert*.pem \
  -u "$AWS_ACCOUNT_ID" -s 3072 -p "hadoop-$HADOOP_VERSION-$ARCH" -r "$ARCH"

# NOTE(review): the secret key is passed on the command line and is visible
# in `ps` output while the upload runs; acceptable on a single-user build
# instance, but worth keeping in mind.
ec2-upload-bundle -b "$S3_BUCKET" \
  -m "/mnt/hadoop-$HADOOP_VERSION-$ARCH.manifest.xml" \
  -a "$AWS_ACCESS_KEY_ID" -s "$AWS_SECRET_ACCESS_KEY"

# End
echo "Done"