source: proiecte/HadoopJUnit/hadoop-0.20.1/src/test/org/apache/hadoop/http/TestServletFilter.java @ 120

Last change on this file since 120 was 120, checked in by (none), 14 years ago

Added the main files for the Hadoop JUnit Project

  • Property svn:executable set to *
File size: 4.4 KB
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.http;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.Random;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
public class TestServletFilter extends junit.framework.TestCase {
  static final Log LOG = LogFactory.getLog(HttpServer.class);
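  /** URI most recently seen by SimpleFilter; reset to null by the test between checks. */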
  static volatile String uri = null;

  /** A very simple filter which records the URI it filters. */
  static public class SimpleFilter implements Filter {
    private FilterConfig filterConfig = null;

    public void init(FilterConfig filterConfig) {
      this.filterConfig = filterConfig;
    }

    public void destroy() {
      this.filterConfig = null;
    }

    public void doFilter(ServletRequest request, ServletResponse response,
        FilterChain chain) throws IOException, ServletException {
      //do nothing if the filter has not been initialized or was destroyed
      if (filterConfig == null)
        return;

      uri = ((HttpServletRequest)request).getRequestURI();
      LOG.info("filtering " + uri);
      chain.doFilter(request, response);
    }

    /** Configuration for the filter */
    static public class Initializer extends FilterInitializer {
      public Initializer() {}

      void initFilter(FilterContainer container) {
        //register this filter with the container under the name "simple", with no parameters
        container.addFilter("simple", SimpleFilter.class.getName(), null);
      }
    }
  }


  /** Access a URL, ignoring IOExceptions such as those thrown when the page does not exist. */
  static void access(String urlstring) throws IOException {
    LOG.warn("access " + urlstring);
    URL url = new URL(urlstring);
    URLConnection connection = url.openConnection();
    connection.connect();

    try {
      BufferedReader in = new BufferedReader(new InputStreamReader(
          connection.getInputStream()));
      try {
        //drain the response
        for(; in.readLine() != null; );
      } finally {
        in.close();
      }
    } catch(IOException ioe) {
      LOG.warn("urlstring=" + urlstring, ioe);
    }
  }

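  /**
   * Start an HttpServer with SimpleFilter installed, access a random
   * sequence of URLs, and verify that every access except /fsck is seen
   * by the filter.
   */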
  public void testServletFilter() throws Exception {
    Configuration conf = new Configuration();

    //start an HTTP server with SimpleFilter installed
    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
        SimpleFilter.Initializer.class.getName());
    HttpServer http = new HttpServer("datanode", "localhost", 0, true, conf);
    http.start();

    final String fsckURL = "/fsck";
    final String stacksURL = "/stacks";
    final String ajspURL = "/a.jsp";
    final String logURL = "/logs/a.log";
    final String hadooplogoURL = "/static/hadoop-logo.jpg";

    final String[] urls = {fsckURL, stacksURL, ajspURL, logURL, hadooplogoURL};
    final Random ran = new Random();
    final int[] sequence = new int[50];

    //generate a random sequence of url indices
    for(int i = 0; i < sequence.length; i++) {
      sequence[i] = ran.nextInt(urls.length);
    }

    //access the urls following the sequence
    final String prefix = "http://localhost:" + http.getPort();
    try {
      for(int i = 0; i < sequence.length; i++) {
        access(prefix + urls[sequence[i]]);

        //make sure everything except fsck gets filtered
        if (sequence[i] == 0) {
          assertEquals(null, uri);
        } else {
          assertEquals(urls[sequence[i]], uri);
          uri = null;
        }
      }
    } finally {
      http.stop();
    }
  }
}