package no.dusken.aranea.ehcache;
import java.util.regex.Pattern;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import net.sf.ehcache.CacheException;
import net.sf.ehcache.constructs.blocking.LockTimeoutException;
import net.sf.ehcache.constructs.web.AlreadyCommittedException;
import net.sf.ehcache.constructs.web.AlreadyGzippedException;
import net.sf.ehcache.constructs.web.PageInfo;
import net.sf.ehcache.constructs.web.filter.FilterNonReentrantException;
import net.sf.ehcache.constructs.web.filter.SimplePageCachingFilter;
/**
* @author Marvin B. Lillehaug &lt;lillehau@underdusken.no&gt;
* Extends SimplePageCachingFilter, adding the ability to exclude urls from being cached.
* To exclude a url from cache specify it in the exclude init param. Does Gzipping anyway.
* It is also possible to exclude a url totally from filtering by specifying it in the forward init param.
* This is useful when several cachingfilters are used in combination.
* Both init params are regexps and comma separated.
*/
public class ExcludablePageCachingFilter extends SimplePageCachingFilter {

    /**
     * Compiled patterns for urls that should not be cached. A matching url
     * is still built (and gzipped) on every request, just never cached.
     * e.g. /admin.+ to disable caching of admin.
     */
    private Pattern[] excludePatterns;

    /**
     * Compiled patterns for urls that should not be processed by this
     * filter at all; the request is handed straight down the chain.
     * This is usually because some other filter takes care of them.
     */
    private Pattern[] forwardPatterns;

    /**
     * Reads the comma separated "exclude" and "forward" init params and
     * compiles them once, so no regexp compilation happens per request.
     * A missing init param is treated as an empty list rather than
     * triggering a NullPointerException.
     */
    @Override
    public void doInit(FilterConfig filterConfig) throws CacheException {
        // Use the passed-in config directly: the inherited filterConfig
        // field is not guaranteed to be populated before super.doInit() runs.
        excludePatterns = compilePatterns(filterConfig.getInitParameter("exclude"));
        forwardPatterns = compilePatterns(filterConfig.getInitParameter("forward"));
        super.doInit(filterConfig);
    }

    /**
     * Splits a comma separated init param (null-safe) and compiles
     * each entry to a {@link Pattern}.
     */
    private static Pattern[] compilePatterns(String param) {
        if (param == null) {
            return new Pattern[0];
        }
        String[] regexps = param.split(",");
        Pattern[] patterns = new Pattern[regexps.length];
        for (int i = 0; i < regexps.length; i++) {
            patterns[i] = Pattern.compile(regexps[i]);
        }
        return patterns;
    }

    /**
     * Builds the PageInfo for the request, bypassing the cache for excluded
     * urls and bypassing this filter entirely for forwarded urls.
     *
     * @return the built (possibly cached) page, or null when the url is on
     *         the forward list and the request was handed down the chain.
     */
    @Override
    protected PageInfo buildPageInfo(HttpServletRequest request, HttpServletResponse response, FilterChain chain) throws Exception {
        // getQueryString() returns null when there is no query string;
        // unconditional concatenation would yield urls like "/foonull",
        // which the configured regexps are unlikely to expect.
        String queryString = request.getQueryString();
        String url = queryString == null
                ? request.getRequestURI()
                : request.getRequestURI() + queryString;
        if (matchesAny(url, excludePatterns)) {
            // Build (and gzip) the page on every request, but never cache it.
            return buildPage(request, response, chain);
        } else if (matchesAny(url, forwardPatterns)) {
            // Let the rest of the filter chain handle this url entirely.
            chain.doFilter(request, response);
            return null;
        } else {
            return super.buildPageInfo(request, response, chain);
        }
    }

    /**
     * @return true if the given url fully matches any of the given patterns.
     */
    private boolean matchesAny(String url, Pattern[] patterns) {
        for (Pattern pattern : patterns) {
            if (pattern.matcher(url).matches()) {
                return true;
            }
        }
        return false;
    }

    /**
     * This is almost the same code as in the super class. The difference is
     * that the super class returned early when the status code was not 200:
     *
     * <pre>
     * // return on error or redirect code
     * int statusCode = pageInfo.getStatusCode();
     * if (statusCode != HttpServletResponse.SC_OK) {
     *     return;
     * }
     * </pre>
     *
     * That check, which followed buildPageInfo(request, response, chain) in
     * the super class, made the filter cut the connection when the status
     * code was not 200; it is deliberately omitted here so non-200 responses
     * are still written out.
     */
    @Override
    protected void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
            throws AlreadyGzippedException, AlreadyCommittedException, FilterNonReentrantException,
            LockTimeoutException, Exception {
        if (response.isCommitted()) {
            throw new AlreadyCommittedException("Response already committed before doing buildPage.");
        }
        logRequestHeaders(request);
        PageInfo pageInfo = buildPageInfo(request, response, chain);
        // buildPageInfo returns null when the url was on the forward list;
        // in that case the rest of the chain has already written the response.
        if (pageInfo != null && !response.isCommitted()) {
            writeResponse(request, response, pageInfo);
        }
    }
}