/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
import java.io.Writer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
40 | public class TestJobClient extends ClusterMapReduceTestCase { |
---|
41 | |
---|
42 | private static final Log LOG = LogFactory.getLog(TestJobClient.class); |
---|
43 | |
---|
44 | private String runJob() throws Exception { |
---|
45 | OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt")); |
---|
46 | Writer wr = new OutputStreamWriter(os); |
---|
47 | wr.write("hello1\n"); |
---|
48 | wr.write("hello2\n"); |
---|
49 | wr.write("hello3\n"); |
---|
50 | wr.close(); |
---|
51 | |
---|
52 | JobConf conf = createJobConf(); |
---|
53 | conf.setJobName("mr"); |
---|
54 | conf.setJobPriority(JobPriority.HIGH); |
---|
55 | |
---|
56 | conf.setInputFormat(TextInputFormat.class); |
---|
57 | |
---|
58 | conf.setMapOutputKeyClass(LongWritable.class); |
---|
59 | conf.setMapOutputValueClass(Text.class); |
---|
60 | |
---|
61 | conf.setOutputFormat(TextOutputFormat.class); |
---|
62 | conf.setOutputKeyClass(LongWritable.class); |
---|
63 | conf.setOutputValueClass(Text.class); |
---|
64 | |
---|
65 | conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class); |
---|
66 | conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class); |
---|
67 | |
---|
68 | FileInputFormat.setInputPaths(conf, getInputDir()); |
---|
69 | FileOutputFormat.setOutputPath(conf, getOutputDir()); |
---|
70 | |
---|
71 | return JobClient.runJob(conf).getID().toString(); |
---|
72 | } |
---|
73 | |
---|
74 | private int runTool(Configuration conf, Tool tool, String[] args, OutputStream out) throws Exception { |
---|
75 | PrintStream oldOut = System.out; |
---|
76 | PrintStream newOut = new PrintStream(out, true); |
---|
77 | try { |
---|
78 | System.setOut(newOut); |
---|
79 | return ToolRunner.run(conf, tool, args); |
---|
80 | } finally { |
---|
81 | System.setOut(oldOut); |
---|
82 | } |
---|
83 | } |
---|
84 | |
---|
85 | public void testGetCounter() throws Exception { |
---|
86 | String jobId = runJob(); |
---|
87 | ByteArrayOutputStream out = new ByteArrayOutputStream(); |
---|
88 | int exitCode = runTool(createJobConf(), new JobClient(), |
---|
89 | new String[] { "-counter", jobId, |
---|
90 | "org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS" }, |
---|
91 | out); |
---|
92 | assertEquals("Exit code", 0, exitCode); |
---|
93 | assertEquals("Counter", "3", out.toString().trim()); |
---|
94 | } |
---|
95 | |
---|
96 | public void testJobList() throws Exception { |
---|
97 | String jobId = runJob(); |
---|
98 | verifyJobPriority(jobId, "HIGH"); |
---|
99 | } |
---|
100 | |
---|
101 | private void verifyJobPriority(String jobId, String priority) |
---|
102 | throws Exception { |
---|
103 | PipedInputStream pis = new PipedInputStream(); |
---|
104 | PipedOutputStream pos = new PipedOutputStream(pis); |
---|
105 | int exitCode = runTool(createJobConf(), new JobClient(), |
---|
106 | new String[] { "-list", "all" }, |
---|
107 | pos); |
---|
108 | assertEquals("Exit code", 0, exitCode); |
---|
109 | BufferedReader br = new BufferedReader(new InputStreamReader(pis)); |
---|
110 | String line = null; |
---|
111 | while ((line=br.readLine()) != null) { |
---|
112 | LOG.info("line = " + line); |
---|
113 | if (!line.startsWith(jobId)) { |
---|
114 | continue; |
---|
115 | } |
---|
116 | assertTrue(line.contains(priority)); |
---|
117 | break; |
---|
118 | } |
---|
119 | pis.close(); |
---|
120 | } |
---|
121 | |
---|
122 | public void testChangingJobPriority() throws Exception { |
---|
123 | String jobId = runJob(); |
---|
124 | int exitCode = runTool(createJobConf(), new JobClient(), |
---|
125 | new String[] { "-set-priority", jobId, "VERY_LOW" }, |
---|
126 | new ByteArrayOutputStream()); |
---|
127 | assertEquals("Exit code", 0, exitCode); |
---|
128 | verifyJobPriority(jobId, "VERY_LOW"); |
---|
129 | } |
---|
130 | } |
---|