/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 | |
---|
19 | package org.apache.hadoop.net; |
---|
20 | |
---|
21 | |
---|
22 | import java.util.HashMap; |
---|
23 | import java.util.Map; |
---|
24 | |
---|
25 | import junit.framework.TestCase; |
---|
26 | |
---|
27 | import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor; |
---|
28 | import org.apache.hadoop.hdfs.protocol.DatanodeID; |
---|
29 | |
---|
30 | public class TestNetworkTopology extends TestCase { |
---|
31 | private final static NetworkTopology cluster = new NetworkTopology(); |
---|
32 | private final static DatanodeDescriptor dataNodes[] = new DatanodeDescriptor[] { |
---|
33 | new DatanodeDescriptor(new DatanodeID("h1:5020"), "/d1/r1"), |
---|
34 | new DatanodeDescriptor(new DatanodeID("h2:5020"), "/d1/r1"), |
---|
35 | new DatanodeDescriptor(new DatanodeID("h3:5020"), "/d1/r2"), |
---|
36 | new DatanodeDescriptor(new DatanodeID("h4:5020"), "/d1/r2"), |
---|
37 | new DatanodeDescriptor(new DatanodeID("h5:5020"), "/d1/r2"), |
---|
38 | new DatanodeDescriptor(new DatanodeID("h6:5020"), "/d2/r3"), |
---|
39 | new DatanodeDescriptor(new DatanodeID("h7:5020"), "/d2/r3") |
---|
40 | }; |
---|
41 | private final static DatanodeDescriptor NODE = |
---|
42 | new DatanodeDescriptor(new DatanodeID("h8:5020"), "/d2/r4"); |
---|
43 | |
---|
44 | static { |
---|
45 | for(int i=0; i<dataNodes.length; i++) { |
---|
46 | cluster.add(dataNodes[i]); |
---|
47 | } |
---|
48 | } |
---|
49 | |
---|
50 | public void testContains() throws Exception { |
---|
51 | for(int i=0; i<dataNodes.length; i++) { |
---|
52 | assertTrue(cluster.contains(dataNodes[i])); |
---|
53 | } |
---|
54 | assertFalse(cluster.contains(NODE)); |
---|
55 | } |
---|
56 | |
---|
57 | public void testNumOfChildren() throws Exception { |
---|
58 | assertEquals(cluster.getNumOfLeaves(), dataNodes.length); |
---|
59 | } |
---|
60 | |
---|
61 | public void testRacks() throws Exception { |
---|
62 | assertEquals(cluster.getNumOfRacks(), 3); |
---|
63 | assertTrue(cluster.isOnSameRack(dataNodes[0], dataNodes[1])); |
---|
64 | assertFalse(cluster.isOnSameRack(dataNodes[1], dataNodes[2])); |
---|
65 | assertTrue(cluster.isOnSameRack(dataNodes[2], dataNodes[3])); |
---|
66 | assertTrue(cluster.isOnSameRack(dataNodes[3], dataNodes[4])); |
---|
67 | assertFalse(cluster.isOnSameRack(dataNodes[4], dataNodes[5])); |
---|
68 | assertTrue(cluster.isOnSameRack(dataNodes[5], dataNodes[6])); |
---|
69 | } |
---|
70 | |
---|
71 | public void testGetDistance() throws Exception { |
---|
72 | assertEquals(cluster.getDistance(dataNodes[0], dataNodes[0]), 0); |
---|
73 | assertEquals(cluster.getDistance(dataNodes[0], dataNodes[1]), 2); |
---|
74 | assertEquals(cluster.getDistance(dataNodes[0], dataNodes[3]), 4); |
---|
75 | assertEquals(cluster.getDistance(dataNodes[0], dataNodes[6]), 6); |
---|
76 | } |
---|
77 | |
---|
78 | public void testPseudoSortByDistance() throws Exception { |
---|
79 | DatanodeDescriptor[] testNodes = new DatanodeDescriptor[3]; |
---|
80 | |
---|
81 | // array contains both local node & local rack node |
---|
82 | testNodes[0] = dataNodes[1]; |
---|
83 | testNodes[1] = dataNodes[2]; |
---|
84 | testNodes[2] = dataNodes[0]; |
---|
85 | cluster.pseudoSortByDistance(dataNodes[0], testNodes ); |
---|
86 | assertTrue(testNodes[0] == dataNodes[0]); |
---|
87 | assertTrue(testNodes[1] == dataNodes[1]); |
---|
88 | assertTrue(testNodes[2] == dataNodes[2]); |
---|
89 | |
---|
90 | // array contains local node |
---|
91 | testNodes[0] = dataNodes[1]; |
---|
92 | testNodes[1] = dataNodes[3]; |
---|
93 | testNodes[2] = dataNodes[0]; |
---|
94 | cluster.pseudoSortByDistance(dataNodes[0], testNodes ); |
---|
95 | assertTrue(testNodes[0] == dataNodes[0]); |
---|
96 | assertTrue(testNodes[1] == dataNodes[1]); |
---|
97 | assertTrue(testNodes[2] == dataNodes[3]); |
---|
98 | |
---|
99 | // array contains local rack node |
---|
100 | testNodes[0] = dataNodes[5]; |
---|
101 | testNodes[1] = dataNodes[3]; |
---|
102 | testNodes[2] = dataNodes[1]; |
---|
103 | cluster.pseudoSortByDistance(dataNodes[0], testNodes ); |
---|
104 | assertTrue(testNodes[0] == dataNodes[1]); |
---|
105 | assertTrue(testNodes[1] == dataNodes[3]); |
---|
106 | assertTrue(testNodes[2] == dataNodes[5]); |
---|
107 | } |
---|
108 | |
---|
109 | public void testRemove() throws Exception { |
---|
110 | for(int i=0; i<dataNodes.length; i++) { |
---|
111 | cluster.remove(dataNodes[i]); |
---|
112 | } |
---|
113 | for(int i=0; i<dataNodes.length; i++) { |
---|
114 | assertFalse(cluster.contains(dataNodes[i])); |
---|
115 | } |
---|
116 | assertEquals(0, cluster.getNumOfLeaves()); |
---|
117 | for(int i=0; i<dataNodes.length; i++) { |
---|
118 | cluster.add(dataNodes[i]); |
---|
119 | } |
---|
120 | } |
---|
121 | |
---|
122 | /** |
---|
123 | * This picks a large number of nodes at random in order to ensure coverage |
---|
124 | * |
---|
125 | * @param numNodes the number of nodes |
---|
126 | * @param excludedScope the excluded scope |
---|
127 | * @return the frequency that nodes were chosen |
---|
128 | */ |
---|
129 | private Map<Node, Integer> pickNodesAtRandom(int numNodes, |
---|
130 | String excludedScope) { |
---|
131 | Map<Node, Integer> frequency = new HashMap<Node, Integer>(); |
---|
132 | for (DatanodeDescriptor dnd : dataNodes) { |
---|
133 | frequency.put(dnd, 0); |
---|
134 | } |
---|
135 | |
---|
136 | for (int j = 0; j < numNodes; j++) { |
---|
137 | Node random = cluster.chooseRandom(excludedScope); |
---|
138 | frequency.put(random, frequency.get(random) + 1); |
---|
139 | } |
---|
140 | return frequency; |
---|
141 | } |
---|
142 | |
---|
143 | /** |
---|
144 | * This test checks that chooseRandom works for an excluded node. |
---|
145 | */ |
---|
146 | public void testChooseRandomExcludedNode() { |
---|
147 | String scope = "~" + NodeBase.getPath(dataNodes[0]); |
---|
148 | Map<Node, Integer> frequency = pickNodesAtRandom(100, scope); |
---|
149 | |
---|
150 | for (Node key : dataNodes) { |
---|
151 | // all nodes except the first should be more than zero |
---|
152 | assertTrue(frequency.get(key) > 0 || key == dataNodes[0]); |
---|
153 | } |
---|
154 | } |
---|
155 | |
---|
156 | /** |
---|
157 | * This test checks that chooseRandom works for an excluded rack. |
---|
158 | */ |
---|
159 | public void testChooseRandomExcludedRack() { |
---|
160 | Map<Node, Integer> frequency = pickNodesAtRandom(100, "~" + "/d2"); |
---|
161 | // all the nodes on the second rack should be zero |
---|
162 | for (int j = 0; j < dataNodes.length; j++) { |
---|
163 | int freq = frequency.get(dataNodes[j]); |
---|
164 | if (dataNodes[j].getNetworkLocation().startsWith("/d2")) { |
---|
165 | assertEquals(0, freq); |
---|
166 | } else { |
---|
167 | assertTrue(freq > 0); |
---|
168 | } |
---|
169 | } |
---|
170 | } |
---|
171 | } |
---|