source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 3.6 KB
Line 
1/**
2 * Licensed to the Apache Software Foundation (ASF) under one
3 * or more contributor license agreements.  See the NOTICE file
4 * distributed with this work for additional information
5 * regarding copyright ownership.  The ASF licenses this file
6 * to you under the Apache License, Version 2.0 (the
7 * "License"); you may not use this file except in compliance
8 * with the License.  You may obtain a copy of the License at
9 *
10 *     http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18package org.apache.hadoop.mapred;
19
20import org.apache.hadoop.fs.FileUtil;
21import org.apache.hadoop.fs.Path;
22import org.apache.hadoop.io.LongWritable;
23import org.apache.hadoop.io.Text;
24
25import java.io.*;
26import java.util.Properties;
27
28public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
29  public void _testMapReduce(boolean restart) throws Exception {
30    OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
31    Writer wr = new OutputStreamWriter(os);
32    wr.write("hello1\n");
33    wr.write("hello2\n");
34    wr.write("hello3\n");
35    wr.write("hello4\n");
36    wr.close();
37
38    if (restart) {
39      stopCluster();
40      startCluster(false, null);
41    }
42   
43    JobConf conf = createJobConf();
44    conf.setJobName("mr");
45
46    conf.setInputFormat(TextInputFormat.class);
47
48    conf.setMapOutputKeyClass(LongWritable.class);
49    conf.setMapOutputValueClass(Text.class);
50
51    conf.setOutputFormat(TextOutputFormat.class);
52    conf.setOutputKeyClass(LongWritable.class);
53    conf.setOutputValueClass(Text.class);
54
55    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
56    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
57
58    FileInputFormat.setInputPaths(conf, getInputDir());
59
60    FileOutputFormat.setOutputPath(conf, getOutputDir());
61
62
63    JobClient.runJob(conf);
64
65    Path[] outputFiles = FileUtil.stat2Paths(
66                           getFileSystem().listStatus(getOutputDir(),
67                           new OutputLogFilter()));
68    if (outputFiles.length > 0) {
69      InputStream is = getFileSystem().open(outputFiles[0]);
70      BufferedReader reader = new BufferedReader(new InputStreamReader(is));
71      String line = reader.readLine();
72      int counter = 0;
73      while (line != null) {
74        counter++;
75        assertTrue(line.contains("hello"));
76        line = reader.readLine();
77      }
78      reader.close();
79      assertEquals(4, counter);
80    }
81
82  }
83
84  public void testMapReduce() throws Exception {
85    _testMapReduce(false);
86  }
87
88  public void testMapReduceRestarting() throws Exception {
89    _testMapReduce(true);
90  }
91
92  public void testDFSRestart() throws Exception {
93    Path file = new Path(getInputDir(), "text.txt");
94    OutputStream os = getFileSystem().create(file);
95    Writer wr = new OutputStreamWriter(os);
96    wr.close();
97
98    stopCluster();
99    startCluster(false, null);
100    assertTrue(getFileSystem().exists(file));
101
102    stopCluster();
103    startCluster(true, null);
104    assertFalse(getFileSystem().exists(file));
105   
106  }
107
108  public void testMRConfig() throws Exception {
109    JobConf conf = createJobConf();
110    assertNull(conf.get("xyz"));
111
112    Properties config = new Properties();
113    config.setProperty("xyz", "XYZ");
114    stopCluster();
115    startCluster(false, config);
116
117    conf = createJobConf();
118    assertEquals("XYZ", conf.get("xyz"));
119  }
120
121}
Note: See TracBrowser for help on using the repository browser.