source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/fs/TestUrlStreamHandler.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 4.6 KB
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;

/**
 * Test of the URL stream handler factory.
 */
public class TestUrlStreamHandler extends TestCase {

  /**
   * Test opening and reading from an InputStream through an hdfs:// URL.
   * <p>
   * First generate a file with some content through the FileSystem API, then
   * try to open and read the file through the URL stream API.
   *
   * @throws IOException
   */
  public void testDfsUrls() throws IOException {

    Configuration conf = new Configuration();
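    // Start an in-process HDFS mini-cluster with 2 datanodes on a freshly
    // formatted namenode.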
    MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
    FileSystem fs = cluster.getFileSystem();

    // Set up our own factory.
    // setURLStreamHandlerFactory can be called at most once per JVM,
    // so the handler registered here remains in effect for all test cases
    // in TestUrlStreamHandler.
    FsUrlStreamHandlerFactory factory = new FsUrlStreamHandlerFactory();
    java.net.URL.setURLStreamHandlerFactory(factory);
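    // From this point on, java.net.URL resolves hdfs:// URLs (and any other
    // scheme backed by a configured Hadoop FileSystem) through this factory.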

    Path filePath = new Path("/thefile");

    try {
      byte[] fileContent = new byte[1024];
      for (int i = 0; i < fileContent.length; ++i)
        fileContent[i] = (byte) i;

      // First create the file through the FileSystem API
      OutputStream os = fs.create(filePath);
      os.write(fileContent);
      os.close();

      // Second, open and read the file content through the URL API
      URI uri = fs.getUri();
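      // Build an hdfs://host:port/thefile URL from the cluster's
      // filesystem URI.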
      URL fileURL =
          new URL(uri.getScheme(), uri.getHost(), uri.getPort(),
              filePath.toString());

      InputStream is = fileURL.openStream();
      assertNotNull(is);

      byte[] bytes = new byte[4096];
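      // The file is only 1024 bytes, so a single read() into the larger
      // buffer is expected to return it in full.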
      assertEquals(1024, is.read(bytes));
      is.close();

      for (int i = 0; i < fileContent.length; ++i)
        assertEquals(fileContent[i], bytes[i]);

      // Cleanup: delete the file
      fs.delete(filePath, false);

    } finally {
      fs.close();
      cluster.shutdown();
    }

  }

  /**
   * Test opening and reading from an InputStream through a file:// URL.
   *
   * @throws IOException
   * @throws URISyntaxException
   */
  public void testFileUrls() throws IOException, URISyntaxException {
    // The URL stream handler factory was already registered by testDfsUrls().
    Configuration conf = new Configuration();

    // Locate the test temporary directory.
    File tmpDir = new File(conf.get("hadoop.tmp.dir"));
    if (!tmpDir.exists()) {
      if (!tmpDir.mkdirs())
        throw new IOException("Cannot create temporary directory: " + tmpDir);
    }

    File tmpFile = new File(tmpDir, "thefile");
    URI uri = tmpFile.toURI();
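    // The file:// URL built from this URI should also be resolved by the
    // registered FsUrlStreamHandlerFactory (backed by the local filesystem)
    // rather than the JDK's default file handler.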

    FileSystem fs = FileSystem.get(uri, conf);

    try {
      byte[] fileContent = new byte[1024];
      for (int i = 0; i < fileContent.length; ++i)
        fileContent[i] = (byte) i;

      // First create the file through the FileSystem API
      OutputStream os = fs.create(new Path(uri.getPath()));
      os.write(fileContent);
      os.close();

      // Second, open and read the file content through the URL API.
      URL fileURL = uri.toURL();

      InputStream is = fileURL.openStream();
      assertNotNull(is);

      byte[] bytes = new byte[4096];
      assertEquals(1024, is.read(bytes));
      is.close();

      for (int i = 0; i < fileContent.length; ++i)
        assertEquals(fileContent[i], bytes[i]);

      // Cleanup: delete the file
      fs.delete(new Path(uri.getPath()), false);

    } finally {
      fs.close();
    }

  }

}