source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java @ 120

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security.authorize;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.tools.DFSAdmin;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.TestMiniMRWithDFS;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.util.StringUtils;

import junit.framework.TestCase;

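/**
 * Tests for Hadoop service-level authorization. One test runs the PI and
 * word-count examples against mini DFS/MR clusters with authorization
 * enabled; the other exercises runtime refresh of the service ACL policy.
 */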
public class TestServiceLevelAuthorization extends TestCase {
  public void testServiceLevelAuthorization() throws Exception {
    MiniDFSCluster dfs = null;
    MiniMRCluster mr = null;
    FileSystem fileSys = null;
    try {
      final int slaves = 4;

      // Turn on service-level authorization
      Configuration conf = new Configuration();
      conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
                    HadoopPolicyProvider.class, PolicyProvider.class);
      conf.setBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
                      true);

      // Start the mini clusters
      dfs = new MiniDFSCluster(conf, slaves, true, null);
      fileSys = dfs.getFileSystem();
      JobConf mrConf = new JobConf(conf);
      mr = new MiniMRCluster(slaves, fileSys.getUri().toString(), 1,
                             null, null, mrConf);

      // Run examples
      TestMiniMRWithDFS.runPI(mr, mr.createJobConf(mrConf));
      TestMiniMRWithDFS.runWordCount(mr, mr.createJobConf(mrConf));
    } finally {
      if (dfs != null) { dfs.shutdown(); }
      if (mr != null) { mr.shutdown(); }
    }
  }

  // An ACL ("users groups" form) that matches no real user or group
  private static final String DUMMY_ACL = "nouser nogroup";
  // A user,group pair that does not match the refresh ACL used by the tests
  private static final String UNKNOWN_USER = "dev,null";

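  /**
   * Writes a replacement hadoop-policy.xml to the given file: every service
   * ACL is opened up to "*" except security.refresh.policy.protocol.acl,
   * which is set to DUMMY_ACL (an ACL matching no real user or group).
   */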
  private void rewriteHadoopPolicyFile(File policyFile) throws IOException {
    FileWriter fos = new FileWriter(policyFile);
    PolicyProvider policyProvider = new HDFSPolicyProvider();
    fos.write("<configuration>\n");
    for (Service service : policyProvider.getServices()) {
      String key = service.getServiceKey();
      String value = "*";
      if (key.equals("security.refresh.policy.protocol.acl")) {
        value = DUMMY_ACL;
      }
      fos.write("<property><name>" + key + "</name><value>" + value +
                "</value></property>\n");
      System.err.println("<property><name>" + key + "</name><value>" + value +
          "</value></property>\n");
    }
    fos.write("</configuration>\n");
    fos.close();
  }

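  /**
   * Asks the NameNode to reload its service-level authorization policy,
   * equivalent to running 'hadoop dfsadmin -refreshServiceAcl'.
   */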
  private void refreshPolicy(Configuration conf) throws IOException {
    DFSAdmin dfsAdmin = new DFSAdmin(conf);
    dfsAdmin.refreshServiceAcl();
  }

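  /**
   * Starts a mini DFS cluster with service-level authorization enabled,
   * refreshes the policy, rewrites hadoop-policy.xml with a restrictive
   * refresh ACL, refreshes again, and finally checks that a refresh issued
   * as UNKNOWN_USER is rejected with a RemoteException. The original
   * hadoop-policy.xml is restored afterwards.
   */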
  public void testRefresh() throws Exception {
    MiniDFSCluster dfs = null;
    try {
      final int slaves = 4;

      // Turn on service-level authorization
      Configuration conf = new Configuration();
      conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
                    HDFSPolicyProvider.class, PolicyProvider.class);
      conf.setBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
                      true);

      // Start the mini dfs cluster
      dfs = new MiniDFSCluster(conf, slaves, true, null);

      // Refresh the service level authorization policy
      refreshPolicy(conf);

      // Simulate an 'edit' of hadoop-policy.xml
      String confDir = System.getProperty("test.build.extraconf",
                                          "build/test/extraconf");
      File policyFile = new File(confDir, ConfiguredPolicy.HADOOP_POLICY_FILE);
      String policyFileCopy = ConfiguredPolicy.HADOOP_POLICY_FILE + ".orig";
      FileUtil.copy(policyFile, FileSystem.getLocal(conf),   // first save original
                    new Path(confDir, policyFileCopy), false, conf);
      rewriteHadoopPolicyFile(                               // rewrite the file
          new File(confDir, ConfiguredPolicy.HADOOP_POLICY_FILE));

      // Refresh the service level authorization policy
      refreshPolicy(conf);

      // Refresh the service level authorization policy once again,
      // this time it should fail!
      try {
        // Note: hadoop-policy.xml for tests has
        // security.refresh.policy.protocol.acl = ${user.name}
        conf.set(UnixUserGroupInformation.UGI_PROPERTY_NAME, UNKNOWN_USER);
        refreshPolicy(conf);
        fail("Refresh of the NameNode's policy file should not have succeeded!");
      } catch (RemoteException re) {
        System.out.println("Good, refresh failed as expected with: " +
                           StringUtils.stringifyException(re.unwrapRemoteException()));
      } finally {
        // Reset to original hadoop-policy.xml
        FileUtil.fullyDelete(new File(confDir,
            ConfiguredPolicy.HADOOP_POLICY_FILE));
        FileUtil.replaceFile(new File(confDir, policyFileCopy),
                             new File(confDir, ConfiguredPolicy.HADOOP_POLICY_FILE));
      }
    } finally {
      if (dfs != null) { dfs.shutdown(); }
    }
  }
}
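To run this test on its own, assuming the stock Ant build that ships with the hadoop-0.20.1 tree (an assumption; the file itself does not say how it is invoked), the usual command from the top of the source tree would be along the lines of:

  ant test -Dtestcase=TestServiceLevelAuthorization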