source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/conf/TestNoDefaultsJobConf.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 3.4 KB
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.conf;

import junit.framework.Assert;

import org.apache.hadoop.mapred.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import java.io.*;

/**
 * Tests that a JobConf created without default values submits jobs
 * properly and that the JobTracker applies its own defaults so the
 * job runs correctly.
 */
public class TestNoDefaultsJobConf extends HadoopTestCase {

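  // HadoopTestCase brings up a mini MR cluster on a mini DFS
  // (one task tracker, one data node) for the duration of the test.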
  public TestNoDefaultsJobConf() throws IOException {
    super(HadoopTestCase.CLUSTER_MR, HadoopTestCase.DFS_FS, 1, 1);
  }

  public void testNoDefaults() throws Exception {
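    // A JobConf built with defaults loads the *-default.xml resources,
    // so hadoop.tmp.dir is set; JobConf(false) skips them, so it is not.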
    JobConf configuration = new JobConf();
    assertTrue(configuration.get("hadoop.tmp.dir", null) != null);

    configuration = new JobConf(false);
    assertTrue(configuration.get("hadoop.tmp.dir", null) == null);

    Path inDir = new Path("testing/jobconf/input");
    Path outDir = new Path("testing/jobconf/output");

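    // Stage a two-line input file for the job.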
    OutputStream os = getFileSystem().create(new Path(inDir, "text.txt"));
    Writer wr = new OutputStreamWriter(os);
    wr.write("hello\n");
    wr.write("hello\n");
    wr.close();

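    // Build the job configuration from scratch (no default resources);
    // only the values set below are submitted with the job.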
    JobConf conf = new JobConf(false);

    // seeding JT and NN info into non-defaults (empty jobconf)
    conf.set("mapred.job.tracker", createJobConf().get("mapred.job.tracker"));
    conf.set("fs.default.name", createJobConf().get("fs.default.name"));

    conf.setJobName("mr");

    conf.setInputFormat(TextInputFormat.class);

    conf.setMapOutputKeyClass(LongWritable.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

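    // Identity map/reduce: records should pass through unchanged.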
    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);

    FileInputFormat.setInputPaths(conf, inDir);

    FileOutputFormat.setOutputPath(conf, outDir);

    JobClient.runJob(conf);

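    // List the job output (OutputLogFilter skips the _logs directory)
    // and verify both "hello" lines made it through.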
    Path[] outputFiles = FileUtil.stat2Paths(
                           getFileSystem().listStatus(outDir,
                           new OutputLogFilter()));
    if (outputFiles.length > 0) {
      InputStream is = getFileSystem().open(outputFiles[0]);
      BufferedReader reader = new BufferedReader(new InputStreamReader(is));
      String line = reader.readLine();
      int counter = 0;
      while (line != null) {
        counter++;
        assertTrue(line.contains("hello"));
        line = reader.readLine();
      }
      reader.close();
      assertEquals(2, counter);
    }

  }

}