#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements.  See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership.  The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
---|
16 | import unittest, os, sys, re, threading, time |
---|
17 | |
---|
# Locate the repository root by stripping the trailing "/testing/..."
# portion from this script's own absolute path, then put the root on
# sys.path so the project's packages (testing, hodlib) are importable.
myDirectory = os.path.realpath(sys.argv[0])
rootDirectory = re.sub("/testing/.*", "", myDirectory)

sys.path.append(rootDirectory)

# Must come after the sys.path manipulation above.
from testing.lib import BaseTestSuite

# Test names the suite should skip (none by default).
excludes = []
---|
26 | |
---|
27 | import tempfile, getpass |
---|
28 | from xml.dom import minidom |
---|
29 | |
---|
30 | from hodlib.Hod.hadoop import hadoopConfig |
---|
31 | |
---|
32 | # All test-case classes should have the naming convention test_.* |
---|
# All test-case classes should have the naming convention test_.*
class test_hadoopConfig(unittest.TestCase):
  """Tests for hodlib.Hod.hadoop.hadoopConfig.gen_site_conf.

  setUp prepares per-user scratch directories and the parameter
  dictionaries; testSuccess generates a hadoop-site.xml, parses it back
  with minidom and checks selected property values; tearDown removes
  only what the test created.
  """

  def setUp(self):
    # Object under test.
    self.__hadoopConfig = hadoopConfig()
    # Per-user scratch root; it may persist across runs, so create it
    # only when absent.
    self.rootDir = '/tmp/hod-%s' % getpass.getuser()
    if not os.path.exists(self.rootDir):
      os.mkdir(self.rootDir)
    # Fresh unique directories for this test invocation.
    self.testingDir = tempfile.mkdtemp(dir=self.rootDir,
                          prefix='HadoopTestSuite.test_hadoopConfig')
    self.confDir = tempfile.mkdtemp(dir=self.rootDir,
                          prefix='HadoopTestSuite.test_hadoopConfig')
    self.tempDir = '/tmp/hod-%s/something' % getpass.getuser()
    # gen_site_conf is expected to write hadoop-site.xml into confDir.
    self.hadoopSite = os.path.join(self.confDir, 'hadoop-site.xml')
    self.numNodes = 4
    self.hdfsAddr = 'nosuchhost1.apache.org:50505'
    self.mapredAddr = 'nosuchhost2.apache.org:50506'
    # Server-side params; presumably emitted as <final> properties —
    # TODO confirm against gen_site_conf.
    self.finalServerParams = {
      'mapred.child.java.opts' : '-Xmx1024m',
      'mapred.compress.map.output' : 'false',
    }
    self.serverParams = {
      'mapred.userlog.limit' : '200',
      'mapred.userlog.retain.hours' : '10',
      'mapred.reduce.parallel.copies' : '20',
    }
    self.clientParams = {
      'mapred.tasktracker.tasks.maximum' : '2',
      'io.sort.factor' : '100',
      'io.sort.mb' : '200',
      'mapred.userlog.limit.kb' : '1024',
      'io.file.buffer.size' : '262144',
    }
    self.clusterFactor = 1.9
    self.mySysDir = '/user/' + getpass.getuser() + '/mapredsystem'

  def testSuccess(self):
    # Generate the site configuration, then parse it back and verify
    # a couple of the resulting property values.
    self.__hadoopConfig.gen_site_conf(
                  confDir = self.confDir,
                  tempDir = self.tempDir,
                  numNodes = self.numNodes,
                  hdfsAddr = self.hdfsAddr,
                  mrSysDir = self.mySysDir,
                  mapredAddr = self.mapredAddr,
                  clientParams = self.clientParams,
                  serverParams = self.serverParams,
                  finalServerParams = self.finalServerParams,
                  clusterFactor = self.clusterFactor
    )
    xmldoc = minidom.parse(self.hadoopSite)
    xmldoc = xmldoc.childNodes[0] # leave out xml spec
    properties = xmldoc.childNodes # children of tag configuration
    keyvals = {}
    for prop in properties:
      # Skip comment nodes; real entries are <property> elements.
      if not isinstance(prop, minidom.Comment):
        # ---------- tag -------------------- -value elem-- data --
        name = prop.getElementsByTagName('name')[0].childNodes[0].data
        value = prop.getElementsByTagName('value')[0].childNodes[0].data
        keyvals[name] = value

    # Use unittest assertion methods instead of bare `assert`: a bare
    # assert is stripped under `python -O` and gives no diagnostic.
    self.assertTrue(keyvals['fs.default.name'].startswith('hdfs://'),
                    'fs.default.name should start with hdfs://')
    self.assertEqual(keyvals['hadoop.tmp.dir'], self.tempDir)

    # TODO other tests

  def tearDown(self):
    # Remove only what this test created; rootDir may be shared with
    # other suites/runs, so it is left in place.
    if os.path.exists(self.hadoopSite): os.unlink(self.hadoopSite)
    if os.path.exists(self.confDir): os.rmdir(self.confDir)
    if os.path.exists(self.testingDir): os.rmdir(self.testingDir)
---|
105 | |
---|
class HadoopTestSuite(BaseTestSuite):
  """Suite driver that registers this module's test_* classes."""

  def __init__(self):
    # Suite-level setup: hand the base class this module's name and the
    # list of excluded tests.
    BaseTestSuite.__init__(self, __name__, excludes)

  def cleanUp(self):
    # Suite-level tearDown: nothing to release.
    pass
---|
115 | |
---|
def RunHadoopTests():
  """Build the suite, run it, clean up, and return the test result."""
  testSuite = HadoopTestSuite()
  result = testSuite.runTests()
  testSuite.cleanUp()
  return result

if __name__ == "__main__":
  RunHadoopTests()
---|