source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/hdfs/TestSetTimes.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 6.9 KB
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs;

import junit.framework.TestCase;
import java.io.*;
import java.util.Random;
import java.net.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * This class tests access and modification times on files in DFS.
 */
public class TestSetTimes extends TestCase {
  static final long seed = 0xDEADBEEFL;
  static final int blockSize = 8192;
  static final int fileSize = 16384;
  static final int numDatanodes = 1;

  static final SimpleDateFormat dateForm = new SimpleDateFormat("yyyy-MM-dd HH:mm");

  Random myrand = new Random();
  Path hostsFile;
  Path excludeFile;

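  /**
   * Creates a file with the given replication, writes fileSize bytes of
   * random data to it, and returns the still-open output stream so the
   * caller can inspect times before the file is closed.
   */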
  private FSDataOutputStream writeFile(FileSystem fileSys, Path name, int repl)
    throws IOException {
    FSDataOutputStream stm = fileSys.create(name, true,
                                            fileSys.getConf().getInt("io.file.buffer.size", 4096),
                                            (short)repl, (long)blockSize);
    byte[] buffer = new byte[fileSize];
    Random rand = new Random(seed);
    rand.nextBytes(buffer);
    stm.write(buffer);
    return stm;
  }

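  /**
   * Deletes the given path and asserts that it existed beforehand and is
   * gone afterwards.
   */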
  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
    assertTrue(fileSys.exists(name));
    fileSys.delete(name, true);
    assertTrue(!fileSys.exists(name));
  }

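  /**
   * Prints the report for each datanode; used as a diagnostic when the
   * test fails with an IOException.
   */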
  private void printDatanodeReport(DatanodeInfo[] info) {
    System.out.println("-------------------------------------------------");
    for (int i = 0; i < info.length; i++) {
      System.out.println(info[i].getDatanodeReport());
      System.out.println();
    }
  }

  /**
   * Tests mod & access time in DFS.
   */
  public void testTimes() throws IOException {
    Configuration conf = new Configuration();
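    // use a short IPC idle timeout and short datanode heartbeat/recheck
    // intervals so the mini-cluster reports live datanodes quickly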
    final int MAX_IDLE_TIME = 2000; // 2s
    conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
    conf.setInt("heartbeat.recheck.interval", 1000);
    conf.setInt("dfs.heartbeat.interval", 1);

    MiniDFSCluster cluster = new MiniDFSCluster(conf, numDatanodes, true, null);
    cluster.waitActive();
    final int nnport = cluster.getNameNodePort();
    InetSocketAddress addr = new InetSocketAddress("localhost",
                                                   cluster.getNameNodePort());
    DFSClient client = new DFSClient(addr, conf);
    DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
    assertEquals("Number of Datanodes ", numDatanodes, info.length);
    FileSystem fileSys = cluster.getFileSystem();
    int replicas = 1;
    assertTrue(fileSys instanceof DistributedFileSystem);

    try {
      //
      // create file and record atime/mtime
      //
      System.out.println("Creating testdir1 and testdir1/test1.dat.");
      Path dir1 = new Path("testdir1");
      Path file1 = new Path(dir1, "test1.dat");
      FSDataOutputStream stm = writeFile(fileSys, file1, replicas);
      FileStatus stat = fileSys.getFileStatus(file1);
      long atimeBeforeClose = stat.getAccessTime();
      String adate = dateForm.format(new Date(atimeBeforeClose));
      System.out.println("atime on " + file1 + " before close is " +
                         adate + " (" + atimeBeforeClose + ")");
      assertTrue(atimeBeforeClose != 0);
      stm.close();

      stat = fileSys.getFileStatus(file1);
      long atime1 = stat.getAccessTime();
      long mtime1 = stat.getModificationTime();
      adate = dateForm.format(new Date(atime1));
      String mdate = dateForm.format(new Date(mtime1));
      System.out.println("atime on " + file1 + " is " + adate +
                         " (" + atime1 + ")");
      System.out.println("mtime on " + file1 + " is " + mdate +
                         " (" + mtime1 + ")");
      assertTrue(atime1 != 0);

      //
      // record dir times
      //
      stat = fileSys.getFileStatus(dir1);
      long mdir1 = stat.getAccessTime();
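      // the directory's access time was never explicitly set, so it should
      // still be the default value of 0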
      assertTrue(mdir1 == 0);

      // set the access time to be one day in the past
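      // (a value of -1 for the modification-time argument asks setTimes to
      //  leave the modification time unchanged)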
      long atime2 = atime1 - (24L * 3600L * 1000L);
      fileSys.setTimes(file1, -1, atime2);

      // check new access time on file
      stat = fileSys.getFileStatus(file1);
      long atime3 = stat.getAccessTime();
      String adate3 = dateForm.format(new Date(atime3));
      System.out.println("new atime on " + file1 + " is " +
                         adate3 + " (" + atime3 + ")");
      assertTrue(atime2 == atime3);
      assertTrue(mtime1 == stat.getModificationTime());

      // set the modification time to be 1 hour in the past
      long mtime2 = mtime1 - (3600L * 1000L);
      fileSys.setTimes(file1, mtime2, -1);

      // check new modification time on file
      stat = fileSys.getFileStatus(file1);
      long mtime3 = stat.getModificationTime();
      String mdate3 = dateForm.format(new Date(mtime3));
      System.out.println("new mtime on " + file1 + " is " +
                         mdate3 + " (" + mtime3 + ")");
      assertTrue(atime2 == stat.getAccessTime());
      assertTrue(mtime2 == mtime3);

      // shutdown cluster and restart
      cluster.shutdown();
      try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
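      // sleeping for longer than the IPC idle timeout gives cached client
      // connections to the old NameNode time to expire before the restart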
      cluster = new MiniDFSCluster(nnport, conf, 1, false, true,
                                   null, null, null);
      cluster.waitActive();
      fileSys = cluster.getFileSystem();

      // verify that access times and modification times persist after a
      // cluster restart.
      System.out.println("Verifying times after cluster restart");
      stat = fileSys.getFileStatus(file1);
      assertTrue(atime2 == stat.getAccessTime());
      assertTrue(mtime3 == stat.getModificationTime());

      cleanupFile(fileSys, file1);
      cleanupFile(fileSys, dir1);
    } catch (IOException e) {
      info = client.datanodeReport(DatanodeReportType.ALL);
      printDatanodeReport(info);
      throw e;
    } finally {
      fileSys.close();
      cluster.shutdown();
    }
  }

  public static void main(String[] args) throws Exception {
    new TestSetTimes().testTimes();
  }
}
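
Since the class extends JUnit 3's TestCase and also provides its own main() entry point, it can be run standalone once the Hadoop core and test classes plus a JUnit 3.x jar are on the classpath. Running main() executes only testTimes(); a minimal sketch of running it through the JUnit text runner instead (the classpath setup is assumed, not part of this file):

  // builds a suite from every public test* method and reports results on stdout
  junit.textui.TestRunner.run(new junit.framework.TestSuite(TestSetTimes.class));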