source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/hdfs/TestLocalDFS.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 3.3 KB
Line 
1/**
2 * Licensed to the Apache Software Foundation (ASF) under one
3 * or more contributor license agreements.  See the NOTICE file
4 * distributed with this work for additional information
5 * regarding copyright ownership.  The ASF licenses this file
6 * to you under the Apache License, Version 2.0 (the
7 * "License"); you may not use this file except in compliance
8 * with the License.  You may obtain a copy of the License at
9 *
10 *     http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18package org.apache.hadoop.hdfs;
19
20import junit.framework.TestCase;
21import java.io.*;
22import org.apache.hadoop.conf.Configuration;
23import org.apache.hadoop.fs.FileSystem;
24import org.apache.hadoop.fs.Path;
25
26/**
27 * This class tests the DFS class via the FileSystem interface in a single node
28 * mini-cluster.
29 */
30public class TestLocalDFS extends TestCase {
31
32  private void writeFile(FileSystem fileSys, Path name) throws IOException {
33    DataOutputStream stm = fileSys.create(name);
34    stm.writeBytes("oom");
35    stm.close();
36  }
37 
38  private void readFile(FileSystem fileSys, Path name) throws IOException {
39    DataInputStream stm = fileSys.open(name);
40    byte[] buffer = new byte[4];
41    int bytesRead = stm.read(buffer, 0 , 4);
42    assertEquals("oom", new String(buffer, 0 , bytesRead));
43    stm.close();
44  }
45 
46  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
47    assertTrue(fileSys.exists(name));
48    fileSys.delete(name, true);
49    assertTrue(!fileSys.exists(name));
50  }
51
52  static String getUserName(FileSystem fs) {
53    if (fs instanceof DistributedFileSystem) {
54      return ((DistributedFileSystem)fs).dfs.ugi.getUserName();
55    }
56    return System.getProperty("user.name");
57  }
58
59  /**
60   * Tests get/set working directory in DFS.
61   */
62  public void testWorkingDirectory() throws IOException {
63    Configuration conf = new Configuration();
64    MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
65    FileSystem fileSys = cluster.getFileSystem();
66    try {
67      Path orig_path = fileSys.getWorkingDirectory();
68      assertTrue(orig_path.isAbsolute());
69      Path file1 = new Path("somewhat/random.txt");
70      writeFile(fileSys, file1);
71      assertTrue(fileSys.exists(new Path(orig_path, file1.toString())));
72      fileSys.delete(file1, true);
73      Path subdir1 = new Path("/somewhere");
74      fileSys.setWorkingDirectory(subdir1);
75      writeFile(fileSys, file1);
76      cleanupFile(fileSys, new Path(subdir1, file1.toString()));
77      Path subdir2 = new Path("else");
78      fileSys.setWorkingDirectory(subdir2);
79      writeFile(fileSys, file1);
80      readFile(fileSys, file1);
81      cleanupFile(fileSys, new Path(new Path(subdir1, subdir2.toString()),
82                                    file1.toString()));
83
84      // test home directory
85      Path home = new Path("/user/" + getUserName(fileSys))
86        .makeQualified(fileSys);
87      Path fsHome = fileSys.getHomeDirectory();
88      assertEquals(home, fsHome);
89
90    } finally {
91      fileSys.close();
92      cluster.shutdown();
93    }
94  }
95}
Note: See TracBrowser for help on using the repository browser.