source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/mapred/WordCount.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 5.5 KB
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapred;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * This is an example Hadoop Map/Reduce application.
 * It reads the text input files, breaks each line into words
 * and counts them. The output is a locally sorted list of words and the
 * count of how often they occurred.
 *
 * To run: bin/hadoop jar build/hadoop-examples.jar wordcount
 *            [-m <i>maps</i>] [-r <i>reduces</i>] <i>in-dir</i> <i>out-dir</i>
 */
public class WordCount extends Configured implements Tool {

  /**
   * Counts the words in each line.
   * For each line of input, break the line into words and emit them as
   * (<b>word</b>, <b>1</b>).
   */
  public static class MapClass extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, IntWritable> {

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(LongWritable key, Text value,
                    OutputCollector<Text, IntWritable> output,
                    Reporter reporter) throws IOException {
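      // Split the line into whitespace-delimited tokens and emit (word, 1) for each.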
      String line = value.toString();
      StringTokenizer itr = new StringTokenizer(line);
      while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());
        output.collect(word, one);
      }
    }
  }

  /**
   * A reducer class that just emits the sum of the input values.
   */
  public static class Reduce extends MapReduceBase
    implements Reducer<Text, IntWritable, Text, IntWritable> {

    public void reduce(Text key, Iterator<IntWritable> values,
                       OutputCollector<Text, IntWritable> output,
                       Reporter reporter) throws IOException {
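      // Sum the counts for this word; the same class also runs as the combiner,
      // so the incoming values may already be partial sums.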
      int sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      output.collect(key, new IntWritable(sum));
    }
  }

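  /**
   * Prints the command-line usage, including the generic Hadoop options,
   * and returns a non-zero code so the caller can exit with an error.
   */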
  static int printUsage() {
    System.out.println("wordcount [-m <maps>] [-r <reduces>] <input> <output>");
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  /**
   * The main driver for the word count map/reduce program.
   * Invoke this method to submit the map/reduce job.
   * @throws IOException When there are communication problems with the
   *                     job tracker.
   */
  public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf(), WordCount.class);
    conf.setJobName("wordcount");

    // the keys are words (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are counts (ints)
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(MapClass.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

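    // Parse the optional -m/-r flags; everything else is collected as a path argument.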
    List<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
      try {
        if ("-m".equals(args[i])) {
          conf.setNumMapTasks(Integer.parseInt(args[++i]));
        } else if ("-r".equals(args[i])) {
          conf.setNumReduceTasks(Integer.parseInt(args[++i]));
        } else {
          other_args.add(args[i]);
        }
      } catch (NumberFormatException except) {
        System.out.println("ERROR: Integer expected instead of " + args[i]);
        return printUsage();
      } catch (ArrayIndexOutOfBoundsException except) {
        System.out.println("ERROR: Required parameter missing from " +
                           args[i-1]);
        return printUsage();
      }
    }
    // Make sure there are exactly 2 parameters left.
    if (other_args.size() != 2) {
      System.out.println("ERROR: Wrong number of parameters: " +
                         other_args.size() + " instead of 2.");
      return printUsage();
    }
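    // The two remaining arguments are the input directory and the output directory.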
    FileInputFormat.setInputPaths(conf, other_args.get(0));
    FileOutputFormat.setOutputPath(conf, new Path(other_args.get(1)));

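    // Submit the job and block until it completes.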
    JobClient.runJob(conf);
    return 0;
  }

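  // ToolRunner parses the generic Hadoop options (e.g. -conf, -D) before calling run().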
  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new Configuration(), new WordCount(), args);
    System.exit(res);
  }

}